diff --git a/.github/workflows/test-multibundle.yml b/.github/workflows/test-multibundle.yml
new file mode 100644
index 0000000..e7e259b
--- /dev/null
+++ b/.github/workflows/test-multibundle.yml
@@ -0,0 +1,156 @@
+name: 'Test: Multi-bundle restore'
+
+on:
+ workflow_dispatch:
+ push:
+ branches: [main]
+ pull_request:
+
+permissions:
+ contents: read
+
+jobs:
+ make-bundles:
+ name: 'Pack bundle (${{ matrix.org }})'
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ org: [alpha, beta, gamma]
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Create throwaway APM project
+ run: |
+ PROJ=/tmp/apm-test-${{ matrix.org }}
+ mkdir -p "$PROJ"
+
+ # Each matrix replica installs the same public sample package.
+ # The point of this test is to validate the multi-bundle LOOP
+ # (N pack jobs -> N artifacts -> N unpacks into one workspace),
+ # not per-org distinctness (that is verified by microsoft/apm#982
+ # against real Apps with genuinely distinct deps -- something
+ # apm-action CI cannot easily mirror without a fleet of test
+      # packages). Identical bundles also exercise the same-SHA
+      # collision path -- restore should succeed without errors
+ cat > "$PROJ/apm.yml" <<'YAML'
+ name: test-${{ matrix.org }}
+ version: 1.0.0
+ description: throwaway fixture for multi-bundle CI
+ dependencies:
+ apm:
+ - microsoft/apm-sample-package
+ mcp: []
+ YAML
+
+ - name: Pack bundle
+ uses: ./
+ with:
+ working-directory: /tmp/apm-test-${{ matrix.org }}
+ pack: 'true'
+ archive: 'true'
+
+ - name: Upload bundle artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: apm-test-${{ matrix.org }}
+ path: /tmp/apm-test-${{ matrix.org }}/build/*.tar.gz
+ if-no-files-found: error
+
+ restore-bundles:
+ name: 'Restore 3 bundles'
+ needs: make-bundles
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Download all test bundles
+ uses: actions/download-artifact@v4
+ with:
+ pattern: apm-test-*
+ path: /tmp/bundles
+
+ - name: Generate bundle list file
+ run: |
+ find /tmp/bundles -name '*.tar.gz' | sort > /tmp/bundle-list.txt
+ echo '--- Bundle list ---'
+ cat /tmp/bundle-list.txt
+ echo '---'
+ test -s /tmp/bundle-list.txt
+
+ - name: Restore multi-bundle
+ id: restore
+ uses: ./
+ with:
+ bundles-file: /tmp/bundle-list.txt
+ working-directory: /tmp/restore-target
+
+ - name: Assert sample package files landed
+ run: |
+ # microsoft/apm-sample-package is the same dep across all 3 orgs;
+ # whatever it ships should appear in restore-target/.github after
+ # at least one unpack succeeds. Just verify the deployment dir
+ # is non-empty (each release of the sample package may rename
+ # individual files, so a content-agnostic check is more durable).
+ if [ ! -d /tmp/restore-target/.github ]; then
+ echo "FAIL: /tmp/restore-target/.github does not exist"
+ ls -laR /tmp/restore-target || true
+ exit 1
+ fi
+ FILE_COUNT=$(find /tmp/restore-target/.github -type f | wc -l)
+ if [ "$FILE_COUNT" -lt 1 ]; then
+ echo "FAIL: no files deployed under /tmp/restore-target/.github"
+ ls -laR /tmp/restore-target || true
+ exit 1
+ fi
+ echo "OK: $FILE_COUNT files deployed under /tmp/restore-target/.github"
+ find /tmp/restore-target/.github -type f | head -20
+
+ - name: Assert bundles-restored output
+ run: |
+ RESTORED='${{ steps.restore.outputs.bundles-restored }}'
+ if [ "$RESTORED" != "3" ]; then
+ echo "FAIL: expected bundles-restored=3, got '$RESTORED'"
+ exit 1
+ fi
+ echo "OK: bundles-restored=3"
+
+ # NOTE on what this CI does and does NOT prove:
+ # - PROVES: the multi-bundle LOOP works (3 separate pack artifacts
+ # each unpack successfully into one shared workspace, no errors,
+ # bundles-restored output is correct, collision-policy banner
+ # fires).
+ # - DOES NOT PROVE: distinct-content merge across N orgs. apm
+ # bundle only includes files attributable to dependencies in
+ # apm.yml, so we cannot inject per-replica marker files into the
+ # bundle without having N genuinely distinct test packages.
+ # The real distinct-content / per-App scenario is end-to-end
+ # tested by microsoft/apm#982 against real GitHub Apps.
+
+ reject-traversal:
+ name: 'Negative test: bundles-file rejects .. traversal'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Generate bundles-file with rejected '..' segment
+ run: |
+ mkdir -p /tmp/neg-bundles
+ cat > /tmp/neg-bundle-list.txt <<'EOF'
+ ../escape.tar.gz
+ EOF
+
+ - name: Restore must FAIL on traversal
+ id: restore-neg
+ continue-on-error: true
+ uses: ./
+ with:
+ bundles-file: /tmp/neg-bundle-list.txt
+ working-directory: /tmp/neg-restore-target
+
+ - name: Assert step failed (traversal rejected)
+ run: |
+ if [ "${{ steps.restore-neg.outcome }}" != "failure" ]; then
+ echo "FAIL: expected restore step to fail on '..' traversal, got outcome=${{ steps.restore-neg.outcome }}"
+ exit 1
+ fi
+ echo "OK: '..' traversal in bundles-file was rejected as required"
diff --git a/README.md b/README.md
index 31d5216..95744a3 100644
--- a/README.md
+++ b/README.md
@@ -72,6 +72,35 @@ Restore primitives from a bundle. The action installs APM (cached across runs) a
bundle: './*.tar.gz'
```
+
+### Multi-bundle restore (multi-org / multi-app)
+
+**Why:** when you fan out a `pack` job across N GitHub Apps (or N orgs, or N teams) you end up with N separate bundle artifacts. Without `bundles-file`, the consumer job has to call `microsoft/apm-action@v1` N times in sequence, which adds latency and obscures which install came from which source. `bundles-file` lets a single restore step merge all N bundles into one workspace in caller-specified order. See [issue #29](https://github.com/microsoft/apm-action/issues/29) for the full rationale and diagrams.
+
+**Backward compatibility:** existing single-`bundle` callers are unaffected. `bundles-file` is a new opt-in input; `pack`, `bundle`, and `bundles-file` are mutually exclusive (the action errors if more than one is set).
+
+```yaml
+# In a downstream job that consumes all bundles:
+- uses: actions/download-artifact@v4
+ with:
+ pattern: apm-*
+ path: /tmp/bundles
+
+- run: find /tmp/bundles -name '*.tar.gz' | sort > /tmp/bundle-list.txt
+
+- uses: microsoft/apm-action@v1
+ id: restore
+ with:
+ bundles-file: /tmp/bundle-list.txt
+ working-directory: /tmp/agent-workspace
+
+- run: echo "Merged ${{ steps.restore.outputs.bundles-restored }} bundles into the workspace"
+```
+
+The `bundles-restored` output reports the integer count of bundles successfully merged, which is convenient for assertions and logging in downstream steps.
+
+**Collision policy:** bundles are applied in list order; on file conflicts, later bundles overwrite earlier bundles. The action logs an explicit warning naming the bundle count before the restore loop begins, so the policy is never silent. Per-file SHA-aware collision detection is planned for v1.6.0.
+
### Cross-job artifact workflow
Pack once, restore everywhere — identical primitives across all consumer jobs.
@@ -153,7 +182,7 @@ For cross-org private repos, pass a PAT with broader scope via the `github-token
github-token: ${{ secrets.APM_PAT }}
```
-For multi-org or multi-platform scenarios, use the `env:` block for full control. An explicit `GITHUB_APM_PAT` in `env:` always wins over the auto-forwarded value:
+For multi-org or multi-platform scenarios, use the `env:` block for full control. An explicit `GITHUB_APM_PAT` in `env:` always wins over the auto-forwarded value. (For the matrix-based fan-out pattern that pairs one App per matrix replica with [`bundles-file:`](#multi-bundle-restore-multi-org--multi-app), see [issue #29](https://github.com/microsoft/apm-action/issues/29).)
```yaml
# Multi-org / multi-platform: full control via env block
@@ -180,6 +209,7 @@ For multi-org or multi-platform scenarios, use the `env:` block for full control
| `compile` | No | `false` | Run `apm compile` after install to generate AGENTS.md |
| `pack` | No | `false` | Pack a bundle after install (produces `.tar.gz` by default) |
| `bundle` | No | | Restore from a bundle (local path or glob). Installs APM and unpacks via `apm unpack` (verified). |
+| `bundles-file` | No | | Path to a UTF-8 text file with one bundle path per line. Restores N bundles into a single workspace in caller-specified order (last wins on collisions). Mutually exclusive with `pack` and `bundle`. |
| `target` | No | | Bundle target: `copilot`, `vscode`, `claude`, or `all` (used with `pack: true`) |
| `archive` | No | `true` | Produce `.tar.gz` instead of directory (used with `pack: true`) |
| `audit-report` | No | | Generate a SARIF audit report (hidden Unicode scanning). `apm install` already blocks critical findings; this adds reporting for Code Scanning and a markdown summary in `$GITHUB_STEP_SUMMARY`. Set to `true` for default path, or provide a custom path. |
@@ -192,6 +222,7 @@ For multi-org or multi-platform scenarios, use the `env:` block for full control
| `primitives-path` | Path where agent primitives were deployed (`.github`) |
| `bundle-path` | Path to the packed bundle (only set in pack mode) |
| `audit-report-path` | Path to the generated SARIF audit report (if `audit-report` was set) |
+| `bundles-restored` | Number of bundles successfully restored (multi-bundle mode only) |
## Third-Party Dependencies
diff --git a/action.yml b/action.yml
index a642ded..46b4f87 100644
--- a/action.yml
+++ b/action.yml
@@ -38,6 +38,17 @@ inputs:
description: 'Restore from a bundle (local path or glob pattern). Skips APM installation entirely.'
required: false
default: ''
+ bundles-file:
+ description: |
+ Path to a UTF-8 text file with one bundle path per line (paths must end
+ in '.tar.gz'). Lines starting with '#' are comments; blank lines are
+ ignored. Glob patterns are NOT expanded -- generate the list yourself
+ with 'find ... | sort' or equivalent.
+ Bundles are restored in caller-specified order (later bundles win on
+ file collisions; the action emits a warning before the loop runs so
+ the policy is explicit). Mutually exclusive with 'pack' and 'bundle'.
+ required: false
+ default: ''
target:
description: 'Bundle target: copilot, vscode, claude, or all (used with pack: true)'
required: false
@@ -64,6 +75,8 @@ outputs:
description: 'Path to the packed bundle (only set in pack mode)'
audit-report-path:
description: 'Path to the generated SARIF audit report, if audit-report was enabled'
+ bundles-restored:
+ description: 'Number of bundles successfully restored (multi-bundle mode only).'
runs:
using: 'node24'
diff --git a/dist/970.index.js b/dist/970.index.js
new file mode 100644
index 0000000..d2ae86b
--- /dev/null
+++ b/dist/970.index.js
@@ -0,0 +1,253 @@
+export const id = 970;
+export const ids = [970];
+export const modules = {
+
+/***/ 2970:
+/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
+
+/* harmony export */ __webpack_require__.d(__webpack_exports__, {
+/* harmony export */ logCollisionPolicy: () => (/* binding */ logCollisionPolicy),
+/* harmony export */ parseBundleListFile: () => (/* binding */ parseBundleListFile),
+/* harmony export */ previewBundleFiles: () => (/* binding */ previewBundleFiles),
+/* harmony export */ restoreMultiBundles: () => (/* binding */ restoreMultiBundles)
+/* harmony export */ });
+/* unused harmony exports TOKEN_ENV_DENYLIST, DEFAULT_MAX_BUNDLES, buildStrippedEnv */
+/* harmony import */ var _actions_core__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(6058);
+/* harmony import */ var _actions_exec__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(382);
+/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(9896);
+/* harmony import */ var fs__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(fs__WEBPACK_IMPORTED_MODULE_2__);
+/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(6928);
+/* harmony import */ var path__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(path__WEBPACK_IMPORTED_MODULE_3__);
+// Gap #1 resolution: `apm unpack --dry-run` IS available in the installed apm CLI
+// (verified via `apm unpack --help` during Phase 2). However, full collision
+// detection across N bundles is deferred to a follow-up PR per the design plan;
+// `previewBundleFiles` is therefore stubbed to return an empty CollisionReport.
+
+
+
+
+/**
+ * Env-var denylist stripped from the apm unpack subprocess (B7).
+ *
+ * Includes:
+ * - APM-recognised credentials: GITHUB_APM_PAT, ADO_APM_PAT.
+ * - GitHub CLI / Actions token aliases that APM may auto-detect now or in
+ * future releases: GITHUB_TOKEN, GH_TOKEN.
+ * - Runner-scoped tokens with high blast radius if exfiltrated by a malicious
+ * bundle's hypothetical lifecycle hook: ACTIONS_RUNTIME_TOKEN (cache write),
+ * ACTIONS_ID_TOKEN_REQUEST_TOKEN (OIDC federation).
+ *
+ * Defence-in-depth: `apm unpack` itself does not need any of these, and the
+ * restore-side multi-bundle path performs no authenticated network calls.
+ */
+const TOKEN_ENV_DENYLIST = [
+ 'GITHUB_APM_PAT',
+ 'ADO_APM_PAT',
+ 'GITHUB_TOKEN',
+ 'GH_TOKEN',
+ 'ACTIONS_RUNTIME_TOKEN',
+ 'ACTIONS_ID_TOKEN_REQUEST_TOKEN',
+];
+/** Default cap on the number of bundles a single list file may contain (B5). */
+const DEFAULT_MAX_BUNDLES = 64;
+/**
+ * Build a sanitised env for the apm unpack subprocess: process.env with the
+ * token denylist removed. Defence-in-depth so a malicious bundle's lifecycle
+ * hooks (if any are ever introduced) cannot exfiltrate the runner's auth.
+ */
+function buildStrippedEnv() {
+ // process.env is Record<string, string | undefined>. Filter undefined-valued
+ // entries up-front so the returned record is genuinely Record<string, string>
+ // without an unsafe `as` cast that hides the underlying type mismatch.
+ const env = Object.fromEntries(Object.entries(process.env).filter((entry) => entry[1] !== undefined));
+ for (const key of TOKEN_ENV_DENYLIST) {
+ delete env[key];
+ }
+ return env;
+}
+/**
+ * Parse a newline-separated bundle list file into validated, deduped paths.
+ *
+ * Rules:
+ * - File must exist and be readable (hard error with path + cwd).
+ * - UTF-8 only (hard error on decode failure).
+ * - Lines starting with '#' are comments (skipped).
+ * - Blank lines are skipped.
+ * - '..' segment in any path -> reject with line number (B3).
+ * - Relative paths resolved against opts.workspaceDir; rejected if they escape it (B1).
+ * - Absolute paths allowed (matches existing bundle: behaviour, B1).
+ * - Each entry must end in `.tar.gz` (defence-in-depth + clear early failure
+ * if a user accidentally points at a directory or wrong file). Glob patterns
+ * are NOT expanded; use `find ... | sort` to generate the list yourself.
+ * - Empty list after stripping -> hard error.
+ * - Duplicates deduped silently (first occurrence wins).
+ * - Cap at opts.maxBundles (default 64, env APM_MAX_BUNDLES) (B5).
+ */
+function parseBundleListFile(filePath, opts) {
+ const cwd = process.cwd();
+ const resolvedListPath = path__WEBPACK_IMPORTED_MODULE_3__.isAbsolute(filePath) ? filePath : path__WEBPACK_IMPORTED_MODULE_3__.resolve(cwd, filePath);
+ if (!fs__WEBPACK_IMPORTED_MODULE_2__.existsSync(resolvedListPath)) {
+ throw new Error(`bundles-file not found: ${filePath} (resolved: ${resolvedListPath}, cwd: ${cwd})`);
+ }
+ // Read as Buffer first so we can validate UTF-8 (B2).
+ let raw;
+ try {
+ raw = fs__WEBPACK_IMPORTED_MODULE_2__.readFileSync(resolvedListPath);
+ }
+ catch (e) {
+ const msg = e instanceof Error ? e.message : String(e);
+ throw new Error(`bundles-file unreadable: ${resolvedListPath}: ${msg}`);
+ }
+ // Strict UTF-8 decode using TextDecoder with fatal: true.
+ let content;
+ try {
+ content = new TextDecoder('utf-8', { fatal: true }).decode(raw);
+ }
+ catch {
+ throw new Error(`bundles-file is not valid UTF-8: ${resolvedListPath}`);
+ }
+ const workspaceDir = opts?.workspaceDir
+ ?? process.env.GITHUB_WORKSPACE
+ ?? cwd;
+ const resolvedWorkspace = path__WEBPACK_IMPORTED_MODULE_3__.resolve(workspaceDir);
+ const envCap = parseInt(process.env.APM_MAX_BUNDLES || '', 10);
+ const maxBundles = Number.isFinite(envCap) && envCap > 0
+ ? envCap
+ : (opts?.maxBundles ?? DEFAULT_MAX_BUNDLES);
+ const lines = content.split(/\r?\n/);
+ const seen = new Set();
+ const result = [];
+ for (let i = 0; i < lines.length; i++) {
+ const lineNum = i + 1;
+ const trimmed = lines[i].trim();
+ if (!trimmed)
+ continue;
+ if (trimmed.startsWith('#'))
+ continue;
+ // Reject any '..' segment before resolving (B3). Normalise both '/' and '\'.
+ const segments = trimmed.split(/[\\/]+/);
+ if (segments.some(seg => seg === '..')) {
+ throw new Error(`bundles-file line ${lineNum}: rejected '..' segment in path: ${trimmed}`);
+ }
+ // Require .tar.gz extension. Globs are not expanded; bare paths only.
+ // Catches mis-configured list files (typo, directory, or wildcard left
+ // unexpanded) at parse time rather than surfacing as a confusing tar error.
+ if (!trimmed.toLowerCase().endsWith('.tar.gz')) {
+ throw new Error(`bundles-file line ${lineNum}: entry must end in '.tar.gz' `
+ + `(globs are not expanded; use find or ls to generate the list): ${trimmed}`);
+ }
+ const isAbs = path__WEBPACK_IMPORTED_MODULE_3__.isAbsolute(trimmed);
+ const resolved = isAbs ? path__WEBPACK_IMPORTED_MODULE_3__.resolve(trimmed) : path__WEBPACK_IMPORTED_MODULE_3__.resolve(resolvedWorkspace, trimmed);
+ // Workspace escape check (B1) -- relative paths only. Absolute paths are
+ // user-explicit and allowed outside the workspace (mirrors bundler.ts).
+ if (!isAbs) {
+ const rel = path__WEBPACK_IMPORTED_MODULE_3__.relative(resolvedWorkspace, resolved);
+ if (rel.startsWith('..') || path__WEBPACK_IMPORTED_MODULE_3__.isAbsolute(rel)) {
+ throw new Error(`bundles-file line ${lineNum}: relative path escapes workspace ${resolvedWorkspace}: ${trimmed}`);
+ }
+ }
+ if (seen.has(resolved))
+ continue;
+ seen.add(resolved);
+ result.push(resolved);
+ }
+ if (result.length === 0) {
+ throw new Error(`bundles-file is empty after stripping comments and blank lines: ${resolvedListPath}`);
+ }
+ if (result.length > maxBundles) {
+ throw new Error(`bundles-file contains ${result.length} bundles (max ${maxBundles})`);
+ }
+ return result;
+}
+/**
+ * Preview file collisions across N bundles without extracting.
+ *
+ * NOTE: Stubbed for v1.5.0 -- returns an empty CollisionReport. Full
+ * implementation (which would shell out to `apm unpack --dry-run` and
+ * aggregate file lists across bundles, distinguishing same-SHA from
+ * different-SHA overlaps) is planned for v1.6.0. The restore loop is NOT
+ * blocked on this; the policy is documented up-front via
+ * `logCollisionPolicy()` so users are not surprised by silent overwrites.
+ *
+ * The function is wired into the runner today so its call site is real,
+ * not dead code -- the v1.6.0 follow-up only swaps the implementation.
+ */
+async function previewBundleFiles(bundles) {
+ void bundles;
+ _actions_core__WEBPACK_IMPORTED_MODULE_0__/* .debug */ .Yz('previewBundleFiles: dry-run aggregation not yet implemented; returning empty report');
+ return { sameSha: [], differentSha: [] };
+}
+/**
+ * Emit a single, explicit policy banner BEFORE the restore loop runs so the
+ * user is never surprised by silent overwrites. No-op for the single-bundle
+ * case (no possible collisions). Intentionally `core.warning` not `core.info`
+ * so it is annotated visibly in the GitHub Actions summary.
+ */
+function logCollisionPolicy(bundleCount) {
+ if (bundleCount <= 1)
+ return;
+ _actions_core__WEBPACK_IMPORTED_MODULE_0__/* .warning */ .$e(`Multi-bundle restore: ${bundleCount} bundles will be applied in list order. `
+ + `On file conflicts, later bundles overwrite earlier bundles silently. `
+ + `Per-file SHA collision detection is planned for v1.6.0. `
+ + `Until then, ensure the bundle list is in your intended precedence order.`);
+}
+/**
+ * Restore N bundles into the same workspace directory, in caller-specified order.
+ *
+ * - Verifies `apm` is on PATH (B4: hard fail, no fallback).
+ * - Loops through bundles in order, calling `apm unpack <bundle> -o <outputDir>`.
+ * - Subprocess env has GITHUB_APM_PAT, ADO_APM_PAT, GITHUB_TOKEN stripped (B7).
+ * - Subprocess uses argv array, not shell string (B8).
+ * - Fail-fast: if bundle K fails, throw with index K, path, and stderr.
+ * - Returns count + empty CollisionReport (collision detection deferred).
+ *
+ * @param bundles Ordered array of absolute bundle paths (from parseBundleListFile).
+ * @param outputDir Workspace directory to restore into.
+ */
+async function restoreMultiBundles(bundles, outputDir) {
+ // B4: hard-fail if apm is not on PATH. Caller is expected to have invoked
+ // ensureApmInstalled() already; this is a defensive check, not a fallback.
+ const apmAvailable = await _actions_exec__WEBPACK_IMPORTED_MODULE_1__/* .exec */ .m('apm', ['--version'], {
+ ignoreReturnCode: true,
+ silent: true,
+ }).catch(() => 1) === 0;
+ if (!apmAvailable) {
+ throw new Error('apm CLI not found on PATH. Multi-bundle restore requires APM to be installed; '
+ + 'ensure ensureApmInstalled() ran before restoreMultiBundles().');
+ }
+ const resolvedOutput = path__WEBPACK_IMPORTED_MODULE_3__.resolve(outputDir);
+ const env = buildStrippedEnv();
+ const total = bundles.length;
+ for (let i = 0; i < total; i++) {
+ const bundle = bundles[i];
+ const human = `bundle ${i + 1} of ${total}`;
+ _actions_core__WEBPACK_IMPORTED_MODULE_0__/* .info */ .pq(`[${human}] Unpacking: ${bundle}`);
+ let stderr = '';
+ const rc = await _actions_exec__WEBPACK_IMPORTED_MODULE_1__/* .exec */ .m('apm', ['unpack', bundle, '-o', resolvedOutput], {
+ ignoreReturnCode: true,
+ env,
+ listeners: {
+ stderr: (data) => { stderr += data.toString(); },
+ },
+ });
+ if (rc !== 0) {
+ const tail = stderr.trim().split(/\r?\n/).slice(-10).join('\n');
+ throw new Error(`apm unpack failed for ${human} (path: ${bundle}, exit code: ${rc})`
+ + (tail ? `\nstderr:\n${tail}` : ''));
+ }
+ // Per-bundle confirmation so a stalled run is debuggable from the log
+ // alone without re-reading the surrounding 'Unpacking' lines.
+ _actions_core__WEBPACK_IMPORTED_MODULE_0__/* .info */ .pq(`[${human}] OK`);
+ }
+ return {
+ count: total,
+ collisions: { sameSha: [], differentSha: [] },
+ };
+}
+
+
+/***/ })
+
+};
+
+//# sourceMappingURL=970.index.js.map
\ No newline at end of file
diff --git a/dist/index.js b/dist/index.js
index 1fa33bb..d36b083 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -31667,6 +31667,20 @@ module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("assert");
/***/ }),
+/***/ 5317:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("child_process");
+
+/***/ }),
+
+/***/ 6982:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("crypto");
+
+/***/ }),
+
/***/ 4434:
/***/ ((module) => {
@@ -31674,6 +31688,13 @@ module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("events");
/***/ }),
+/***/ 9896:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs");
+
+/***/ }),
+
/***/ 8611:
/***/ ((module) => {
@@ -31835,6 +31856,13 @@ module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:zlib");
/***/ }),
+/***/ 857:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("os");
+
+/***/ }),
+
/***/ 6928:
/***/ ((module) => {
@@ -31861,108 +31889,30 @@ module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("tls");
module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("util");
-/***/ })
+/***/ }),
-/******/ });
-/************************************************************************/
-/******/ // The module cache
-/******/ var __webpack_module_cache__ = {};
-/******/
-/******/ // The require function
-/******/ function __nccwpck_require__(moduleId) {
-/******/ // Check if module is in cache
-/******/ var cachedModule = __webpack_module_cache__[moduleId];
-/******/ if (cachedModule !== undefined) {
-/******/ return cachedModule.exports;
-/******/ }
-/******/ // Create a new module (and put it into the cache)
-/******/ var module = __webpack_module_cache__[moduleId] = {
-/******/ // no module.id needed
-/******/ // no module.loaded needed
-/******/ exports: {}
-/******/ };
-/******/
-/******/ // Execute the module function
-/******/ var threw = true;
-/******/ try {
-/******/ __webpack_modules__[moduleId](module, module.exports, __nccwpck_require__);
-/******/ threw = false;
-/******/ } finally {
-/******/ if(threw) delete __webpack_module_cache__[moduleId];
-/******/ }
-/******/
-/******/ // Return the exports of the module
-/******/ return module.exports;
-/******/ }
-/******/
-/************************************************************************/
-/******/ /* webpack/runtime/create fake namespace object */
-/******/ (() => {
-/******/ var getProto = Object.getPrototypeOf ? (obj) => (Object.getPrototypeOf(obj)) : (obj) => (obj.__proto__);
-/******/ var leafPrototypes;
-/******/ // create a fake namespace object
-/******/ // mode & 1: value is a module id, require it
-/******/ // mode & 2: merge all properties of value into the ns
-/******/ // mode & 4: return value when already ns object
-/******/ // mode & 16: return value when it's Promise-like
-/******/ // mode & 8|1: behave like require
-/******/ __nccwpck_require__.t = function(value, mode) {
-/******/ if(mode & 1) value = this(value);
-/******/ if(mode & 8) return value;
-/******/ if(typeof value === 'object' && value) {
-/******/ if((mode & 4) && value.__esModule) return value;
-/******/ if((mode & 16) && typeof value.then === 'function') return value;
-/******/ }
-/******/ var ns = Object.create(null);
-/******/ __nccwpck_require__.r(ns);
-/******/ var def = {};
-/******/ leafPrototypes = leafPrototypes || [null, getProto({}), getProto([]), getProto(getProto)];
-/******/ for(var current = mode & 2 && value; typeof current == 'object' && !~leafPrototypes.indexOf(current); current = getProto(current)) {
-/******/ Object.getOwnPropertyNames(current).forEach((key) => (def[key] = () => (value[key])));
-/******/ }
-/******/ def['default'] = () => (value);
-/******/ __nccwpck_require__.d(ns, def);
-/******/ return ns;
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/define property getters */
-/******/ (() => {
-/******/ // define getter functions for harmony exports
-/******/ __nccwpck_require__.d = (exports, definition) => {
-/******/ for(var key in definition) {
-/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) {
-/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
-/******/ }
-/******/ }
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/hasOwnProperty shorthand */
-/******/ (() => {
-/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
-/******/ })();
-/******/
-/******/ /* webpack/runtime/make namespace object */
-/******/ (() => {
-/******/ // define __esModule on exports
-/******/ __nccwpck_require__.r = (exports) => {
-/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
-/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
-/******/ }
-/******/ Object.defineProperty(exports, '__esModule', { value: true });
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/compat */
-/******/
-/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = new URL('.', import.meta.url).pathname.slice(import.meta.url.match(/^file:\/\/\/\w:/) ? 1 : 0, -1) + "/";
-/******/
-/************************************************************************/
-var __webpack_exports__ = {};
+/***/ 6058:
+/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
+
+
+// EXPORTS
+__nccwpck_require__.d(__webpack_exports__, {
+ fM: () => (/* binding */ addPath),
+ Yz: () => (/* binding */ core_debug),
+ V4: () => (/* binding */ getInput),
+ pq: () => (/* binding */ info),
+ _o: () => (/* binding */ isDebug),
+ C1: () => (/* binding */ setFailed),
+ uH: () => (/* binding */ setOutput),
+ Pq: () => (/* binding */ core_setSecret),
+ z: () => (/* reexport */ summary),
+ $e: () => (/* binding */ warning)
+});
-;// CONCATENATED MODULE: external "os"
-const external_os_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("os");
+// UNUSED EXPORTS: ExitCode, endGroup, error, exportVariable, getBooleanInput, getIDToken, getMultilineInput, getState, group, markdownSummary, notice, platform, saveState, setCommandEcho, startGroup, toPlatformPath, toPosixPath, toWin32Path
+
+// EXTERNAL MODULE: external "os"
+var external_os_ = __nccwpck_require__(857);
;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/utils.js
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
@@ -32037,7 +31987,7 @@ function utils_toCommandProperties(annotationProperties) {
*/
function command_issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
- process.stdout.write(cmd.toString() + external_os_namespaceObject.EOL);
+ process.stdout.write(cmd.toString() + external_os_.EOL);
}
function command_issue(name, message = '') {
command_issueCommand(name, {}, message);
@@ -32091,10 +32041,10 @@ function escapeProperty(s) {
.replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map
-;// CONCATENATED MODULE: external "crypto"
-const external_crypto_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("crypto");
-;// CONCATENATED MODULE: external "fs"
-const external_fs_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs");
+// EXTERNAL MODULE: external "crypto"
+var external_crypto_ = __nccwpck_require__(6982);
+// EXTERNAL MODULE: external "fs"
+var external_fs_ = __nccwpck_require__(9896);
;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/file-command.js
// For internal use, subject to change.
// We use any as a valid input type
@@ -32108,15 +32058,15 @@ function file_command_issueFileCommand(command, message) {
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
- if (!external_fs_namespaceObject.existsSync(filePath)) {
+ if (!external_fs_.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
- external_fs_namespaceObject.appendFileSync(filePath, `${utils_toCommandValue(message)}${external_os_namespaceObject.EOL}`, {
+ external_fs_.appendFileSync(filePath, `${utils_toCommandValue(message)}${external_os_.EOL}`, {
encoding: 'utf8'
});
}
function file_command_prepareKeyValueMessage(key, value) {
- const delimiter = `ghadelimiter_${external_crypto_namespaceObject.randomUUID()}`;
+ const delimiter = `ghadelimiter_${external_crypto_.randomUUID()}`;
const convertedValue = utils_toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
@@ -32127,115 +32077,91 @@ function file_command_prepareKeyValueMessage(key, value) {
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
- return `${key}<<${delimiter}${external_os_namespaceObject.EOL}${convertedValue}${external_os_namespaceObject.EOL}${delimiter}`;
+ return `${key}<<${delimiter}${external_os_.EOL}${convertedValue}${external_os_.EOL}${delimiter}`;
}
//# sourceMappingURL=file-command.js.map
// EXTERNAL MODULE: external "path"
var external_path_ = __nccwpck_require__(6928);
-// EXTERNAL MODULE: external "http"
-var external_http_ = __nccwpck_require__(8611);
-var external_http_namespaceObject = /*#__PURE__*/__nccwpck_require__.t(external_http_, 2);
-// EXTERNAL MODULE: external "https"
-var external_https_ = __nccwpck_require__(5692);
-var external_https_namespaceObject = /*#__PURE__*/__nccwpck_require__.t(external_https_, 2);
-;// CONCATENATED MODULE: ./node_modules/@actions/http-client/lib/proxy.js
-function getProxyUrl(reqUrl) {
- const usingSsl = reqUrl.protocol === 'https:';
- if (checkBypass(reqUrl)) {
- return undefined;
+// EXTERNAL MODULE: ./node_modules/@actions/http-client/lib/index.js + 1 modules
+var lib = __nccwpck_require__(4942);
+;// CONCATENATED MODULE: ./node_modules/@actions/http-client/lib/auth.js
+var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+class BasicCredentialHandler {
+ constructor(username, password) {
+ this.username = username;
+ this.password = password;
}
- const proxyVar = (() => {
- if (usingSsl) {
- return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
- }
- else {
- return process.env['http_proxy'] || process.env['HTTP_PROXY'];
- }
- })();
- if (proxyVar) {
- try {
- return new DecodedURL(proxyVar);
- }
- catch (_a) {
- if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
- return new DecodedURL(`http://${proxyVar}`);
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
}
+ options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
}
- else {
- return undefined;
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
}
}
-function checkBypass(reqUrl) {
- if (!reqUrl.hostname) {
- return false;
+class auth_BearerCredentialHandler {
+ constructor(token) {
+ this.token = token;
}
- const reqHost = reqUrl.hostname;
- if (isLoopbackAddress(reqHost)) {
- return true;
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Bearer ${this.token}`;
}
- const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
- if (!noProxy) {
+ // This handler cannot handle 401
+ canHandleAuthentication() {
return false;
}
- // Determine the request port
- let reqPort;
- if (reqUrl.port) {
- reqPort = Number(reqUrl.port);
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
}
- else if (reqUrl.protocol === 'http:') {
- reqPort = 80;
+}
+class PersonalAccessTokenCredentialHandler {
+ constructor(token) {
+ this.token = token;
}
- else if (reqUrl.protocol === 'https:') {
- reqPort = 443;
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
}
- // Format the request hostname and hostname with port
- const upperReqHosts = [reqUrl.hostname.toUpperCase()];
- if (typeof reqPort === 'number') {
- upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
}
- // Compare request host against noproxy
- for (const upperNoProxyItem of noProxy
- .split(',')
- .map(x => x.trim().toUpperCase())
- .filter(x => x)) {
- if (upperNoProxyItem === '*' ||
- upperReqHosts.some(x => x === upperNoProxyItem ||
- x.endsWith(`.${upperNoProxyItem}`) ||
- (upperNoProxyItem.startsWith('.') &&
- x.endsWith(`${upperNoProxyItem}`)))) {
- return true;
- }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
}
- return false;
-}
-function isLoopbackAddress(host) {
- const hostLower = host.toLowerCase();
- return (hostLower === 'localhost' ||
- hostLower.startsWith('127.') ||
- hostLower.startsWith('[::1]') ||
- hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
-class DecodedURL extends URL {
- constructor(url, base) {
- super(url, base);
- this._decodedUsername = decodeURIComponent(super.username);
- this._decodedPassword = decodeURIComponent(super.password);
- }
- get username() {
- return this._decodedUsername;
- }
- get password() {
- return this._decodedPassword;
- }
-}
-//# sourceMappingURL=proxy.js.map
-// EXTERNAL MODULE: ./node_modules/tunnel/index.js
-var tunnel = __nccwpck_require__(770);
-// EXTERNAL MODULE: ./node_modules/undici/index.js
-var undici = __nccwpck_require__(6752);
-;// CONCATENATED MODULE: ./node_modules/@actions/http-client/lib/index.js
-/* eslint-disable @typescript-eslint/no-explicit-any */
-var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+//# sourceMappingURL=auth.js.map
+;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/oidc-utils.js
+var oidc_utils_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -32247,691 +32173,386 @@ var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _argume
-
-
-var HttpCodes;
-(function (HttpCodes) {
- HttpCodes[HttpCodes["OK"] = 200] = "OK";
- HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
- HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
- HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
- HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
- HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
- HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
- HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
- HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
- HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
- HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
- HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
- HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
- HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
- HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
- HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
- HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
- HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
- HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
- HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
- HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
- HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
- HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
- HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
- HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
- HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
- HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes || (HttpCodes = {}));
-var Headers;
-(function (Headers) {
- Headers["Accept"] = "accept";
- Headers["ContentType"] = "content-type";
-})(Headers || (Headers = {}));
-var MediaTypes;
-(function (MediaTypes) {
- MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes || (MediaTypes = {}));
-/**
- * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
- */
-function lib_getProxyUrl(serverUrl) {
- const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
- return proxyUrl ? proxyUrl.href : '';
-}
-const HttpRedirectCodes = [
- HttpCodes.MovedPermanently,
- HttpCodes.ResourceMoved,
- HttpCodes.SeeOther,
- HttpCodes.TemporaryRedirect,
- HttpCodes.PermanentRedirect
-];
-const HttpResponseRetryCodes = [
- HttpCodes.BadGateway,
- HttpCodes.ServiceUnavailable,
- HttpCodes.GatewayTimeout
-];
-const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
-const ExponentialBackoffCeiling = 10;
-const ExponentialBackoffTimeSlice = 5;
-class HttpClientError extends Error {
- constructor(message, statusCode) {
- super(message);
- this.name = 'HttpClientError';
- this.statusCode = statusCode;
- Object.setPrototypeOf(this, HttpClientError.prototype);
+class oidc_utils_OidcClient {
+ static createHttpClient(allowRetry = true, maxRetry = 10) {
+ const requestOptions = {
+ allowRetries: allowRetry,
+ maxRetries: maxRetry
+ };
+ return new HttpClient('actions/oidc-client', [new BearerCredentialHandler(oidc_utils_OidcClient.getRequestToken())], requestOptions);
}
-}
-class HttpClientResponse {
- constructor(message) {
- this.message = message;
+ static getRequestToken() {
+ const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
+ if (!token) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
+ }
+ return token;
}
- readBody() {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
- let output = Buffer.alloc(0);
- this.message.on('data', (chunk) => {
- output = Buffer.concat([output, chunk]);
- });
- this.message.on('end', () => {
- resolve(output.toString());
- });
- }));
- });
+ static getIDTokenUrl() {
+ const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
+ if (!runtimeUrl) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
+ }
+ return runtimeUrl;
}
- readBodyBuffer() {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
- const chunks = [];
- this.message.on('data', (chunk) => {
- chunks.push(chunk);
- });
- this.message.on('end', () => {
- resolve(Buffer.concat(chunks));
- });
- }));
+ static getCall(id_token_url) {
+ return oidc_utils_awaiter(this, void 0, void 0, function* () {
+ var _a;
+ const httpclient = oidc_utils_OidcClient.createHttpClient();
+ const res = yield httpclient
+ .getJson(id_token_url)
+ .catch(error => {
+ throw new Error(`Failed to get ID Token. \n
+ Error Code : ${error.statusCode}\n
+ Error Message: ${error.message}`);
+ });
+ const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
+ if (!id_token) {
+ throw new Error('Response json body do not have ID Token field');
+ }
+ return id_token;
});
}
-}
-function isHttps(requestUrl) {
- const parsedUrl = new URL(requestUrl);
- return parsedUrl.protocol === 'https:';
-}
-class lib_HttpClient {
- constructor(userAgent, handlers, requestOptions) {
- this._ignoreSslError = false;
- this._allowRedirects = true;
- this._allowRedirectDowngrade = false;
- this._maxRedirects = 50;
- this._allowRetries = false;
- this._maxRetries = 1;
- this._keepAlive = false;
- this._disposed = false;
- this.userAgent = this._getUserAgentWithOrchestrationId(userAgent);
- this.handlers = handlers || [];
- this.requestOptions = requestOptions;
- if (requestOptions) {
- if (requestOptions.ignoreSslError != null) {
- this._ignoreSslError = requestOptions.ignoreSslError;
- }
- this._socketTimeout = requestOptions.socketTimeout;
- if (requestOptions.allowRedirects != null) {
- this._allowRedirects = requestOptions.allowRedirects;
+ static getIDToken(audience) {
+ return oidc_utils_awaiter(this, void 0, void 0, function* () {
+ try {
+ // New ID Token is requested from action service
+ let id_token_url = oidc_utils_OidcClient.getIDTokenUrl();
+ if (audience) {
+ const encodedAudience = encodeURIComponent(audience);
+ id_token_url = `${id_token_url}&audience=${encodedAudience}`;
+ }
+ debug(`ID token url is ${id_token_url}`);
+ const id_token = yield oidc_utils_OidcClient.getCall(id_token_url);
+ setSecret(id_token);
+ return id_token;
}
- if (requestOptions.allowRedirectDowngrade != null) {
- this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ catch (error) {
+ throw new Error(`Error message: ${error.message}`);
}
- if (requestOptions.maxRedirects != null) {
- this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ });
+ }
+}
+//# sourceMappingURL=oidc-utils.js.map
+;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/summary.js
+var summary_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+
+
+const { access, appendFile, writeFile } = external_fs_.promises;
+const SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
+const SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
+class Summary {
+ constructor() {
+ this._buffer = '';
+ }
+ /**
+ * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
+ * Also checks r/w permissions.
+ *
+ * @returns step summary file path
+ */
+ filePath() {
+ return summary_awaiter(this, void 0, void 0, function* () {
+ if (this._filePath) {
+ return this._filePath;
}
- if (requestOptions.keepAlive != null) {
- this._keepAlive = requestOptions.keepAlive;
+ const pathFromEnv = process.env[SUMMARY_ENV_VAR];
+ if (!pathFromEnv) {
+ throw new Error(`Unable to find environment variable for $${SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
- if (requestOptions.allowRetries != null) {
- this._allowRetries = requestOptions.allowRetries;
+ try {
+ yield access(pathFromEnv, external_fs_.constants.R_OK | external_fs_.constants.W_OK);
}
- if (requestOptions.maxRetries != null) {
- this._maxRetries = requestOptions.maxRetries;
+ catch (_a) {
+ throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
- }
- }
- options(requestUrl, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
- });
- }
- get(requestUrl, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('GET', requestUrl, null, additionalHeaders || {});
- });
- }
- del(requestUrl, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ this._filePath = pathFromEnv;
+ return this._filePath;
});
}
- post(requestUrl, data, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('POST', requestUrl, data, additionalHeaders || {});
- });
- }
- patch(requestUrl, data, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('PATCH', requestUrl, data, additionalHeaders || {});
- });
- }
- put(requestUrl, data, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('PUT', requestUrl, data, additionalHeaders || {});
- });
- }
- head(requestUrl, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('HEAD', requestUrl, null, additionalHeaders || {});
- });
+ /**
+ * Wraps content in an HTML tag, adding any HTML attributes
+ *
+ * @param {string} tag HTML tag to wrap
+ * @param {string | null} content content within the tag
+ * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
+ *
+ * @returns {string} content wrapped in HTML element
+ */
+ wrap(tag, content, attrs = {}) {
+ const htmlAttrs = Object.entries(attrs)
+ .map(([key, value]) => ` ${key}="${value}"`)
+ .join('');
+ if (!content) {
+ return `<${tag}${htmlAttrs}>`;
+ }
+ return `<${tag}${htmlAttrs}>${content}${tag}>`;
}
- sendStream(verb, requestUrl, stream, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request(verb, requestUrl, stream, additionalHeaders);
+ /**
+ * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
+ *
+ * @param {SummaryWriteOptions} [options] (optional) options for write operation
+ *
+ * @returns {Promise} summary instance
+ */
+ write(options) {
+ return summary_awaiter(this, void 0, void 0, function* () {
+ const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
+ const filePath = yield this.filePath();
+ const writeFunc = overwrite ? writeFile : appendFile;
+ yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
+ return this.emptyBuffer();
});
}
/**
- * Gets a typed object from an endpoint
- * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
+ * Clears the summary buffer and wipes the summary file
+ *
+ * @returns {Summary} summary instance
*/
- getJson(requestUrl_1) {
- return __awaiter(this, arguments, void 0, function* (requestUrl, additionalHeaders = {}) {
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- const res = yield this.get(requestUrl, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
+ clear() {
+ return summary_awaiter(this, void 0, void 0, function* () {
+ return this.emptyBuffer().write({ overwrite: true });
});
}
- postJson(requestUrl_1, obj_1) {
- return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {
- const data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] =
- this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);
- const res = yield this.post(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- });
+ /**
+ * Returns the current summary buffer as a string
+ *
+ * @returns {string} string of summary buffer
+ */
+ stringify() {
+ return this._buffer;
}
- putJson(requestUrl_1, obj_1) {
- return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {
- const data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] =
- this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);
- const res = yield this.put(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- });
+ /**
+ * If the summary buffer is empty
+ *
+ * @returns {boolen} true if the buffer is empty
+ */
+ isEmptyBuffer() {
+ return this._buffer.length === 0;
}
- patchJson(requestUrl_1, obj_1) {
- return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {
- const data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] =
- this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);
- const res = yield this.patch(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- });
+ /**
+ * Resets the summary buffer without writing to summary file
+ *
+ * @returns {Summary} summary instance
+ */
+ emptyBuffer() {
+ this._buffer = '';
+ return this;
}
/**
- * Makes a raw http request.
- * All other methods such as get, post, patch, and request ultimately call this.
- * Prefer get, del, post and patch
+ * Adds raw text to the summary buffer
+ *
+ * @param {string} text content to add
+ * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
+ *
+ * @returns {Summary} summary instance
*/
- request(verb, requestUrl, data, headers) {
- return __awaiter(this, void 0, void 0, function* () {
- if (this._disposed) {
- throw new Error('Client has already been disposed.');
- }
- const parsedUrl = new URL(requestUrl);
- let info = this._prepareRequest(verb, parsedUrl, headers);
- // Only perform retries on reads since writes may not be idempotent.
- const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
- ? this._maxRetries + 1
- : 1;
- let numTries = 0;
- let response;
- do {
- response = yield this.requestRaw(info, data);
- // Check if it's an authentication challenge
- if (response &&
- response.message &&
- response.message.statusCode === HttpCodes.Unauthorized) {
- let authenticationHandler;
- for (const handler of this.handlers) {
- if (handler.canHandleAuthentication(response)) {
- authenticationHandler = handler;
- break;
- }
- }
- if (authenticationHandler) {
- return authenticationHandler.handleAuthentication(this, info, data);
- }
- else {
- // We have received an unauthorized response but have no handlers to handle it.
- // Let the response return to the caller.
- return response;
- }
- }
- let redirectsRemaining = this._maxRedirects;
- while (response.message.statusCode &&
- HttpRedirectCodes.includes(response.message.statusCode) &&
- this._allowRedirects &&
- redirectsRemaining > 0) {
- const redirectUrl = response.message.headers['location'];
- if (!redirectUrl) {
- // if there's no location to redirect to, we won't
- break;
- }
- const parsedRedirectUrl = new URL(redirectUrl);
- if (parsedUrl.protocol === 'https:' &&
- parsedUrl.protocol !== parsedRedirectUrl.protocol &&
- !this._allowRedirectDowngrade) {
- throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
- }
- // we need to finish reading the response before reassigning response
- // which will leak the open socket.
- yield response.readBody();
- // strip authorization header if redirected to a different hostname
- if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
- for (const header in headers) {
- // header names are case insensitive
- if (header.toLowerCase() === 'authorization') {
- delete headers[header];
- }
- }
- }
- // let's make the request with the new redirectUrl
- info = this._prepareRequest(verb, parsedRedirectUrl, headers);
- response = yield this.requestRaw(info, data);
- redirectsRemaining--;
- }
- if (!response.message.statusCode ||
- !HttpResponseRetryCodes.includes(response.message.statusCode)) {
- // If not a retry code, return immediately instead of retrying
- return response;
- }
- numTries += 1;
- if (numTries < maxTries) {
- yield response.readBody();
- yield this._performExponentialBackoff(numTries);
- }
- } while (numTries < maxTries);
- return response;
- });
+ addRaw(text, addEOL = false) {
+ this._buffer += text;
+ return addEOL ? this.addEOL() : this;
}
/**
- * Needs to be called if keepAlive is set to true in request options.
+ * Adds the operating system-specific end-of-line marker to the buffer
+ *
+ * @returns {Summary} summary instance
*/
- dispose() {
- if (this._agent) {
- this._agent.destroy();
- }
- this._disposed = true;
+ addEOL() {
+ return this.addRaw(external_os_.EOL);
}
/**
- * Raw request.
- * @param info
- * @param data
+ * Adds an HTML codeblock to the summary buffer
+ *
+ * @param {string} code content to render within fenced code block
+ * @param {string} lang (optional) language to syntax highlight code
+ *
+ * @returns {Summary} summary instance
*/
- requestRaw(info, data) {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve, reject) => {
- function callbackForResult(err, res) {
- if (err) {
- reject(err);
- }
- else if (!res) {
- // If `err` is not passed, then `res` must be passed.
- reject(new Error('Unknown error'));
- }
- else {
- resolve(res);
- }
- }
- this.requestRawWithCallback(info, data, callbackForResult);
- });
- });
+ addCodeBlock(code, lang) {
+ const attrs = Object.assign({}, (lang && { lang }));
+ const element = this.wrap('pre', this.wrap('code', code), attrs);
+ return this.addRaw(element).addEOL();
}
/**
- * Raw request with callback.
- * @param info
- * @param data
- * @param onResult
+ * Adds an HTML list to the summary buffer
+ *
+ * @param {string[]} items list of items to render
+ * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
+ *
+ * @returns {Summary} summary instance
*/
- requestRawWithCallback(info, data, onResult) {
- if (typeof data === 'string') {
- if (!info.options.headers) {
- info.options.headers = {};
- }
- info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
- }
- let callbackCalled = false;
- function handleResult(err, res) {
- if (!callbackCalled) {
- callbackCalled = true;
- onResult(err, res);
- }
- }
- const req = info.httpModule.request(info.options, (msg) => {
- const res = new HttpClientResponse(msg);
- handleResult(undefined, res);
- });
- let socket;
- req.on('socket', sock => {
- socket = sock;
- });
- // If we ever get disconnected, we want the socket to timeout eventually
- req.setTimeout(this._socketTimeout || 3 * 60000, () => {
- if (socket) {
- socket.end();
- }
- handleResult(new Error(`Request timeout: ${info.options.path}`));
- });
- req.on('error', function (err) {
- // err has statusCode property
- // res should have headers
- handleResult(err);
- });
- if (data && typeof data === 'string') {
- req.write(data, 'utf8');
- }
- if (data && typeof data !== 'string') {
- data.on('close', function () {
- req.end();
- });
- data.pipe(req);
- }
- else {
- req.end();
- }
+ addList(items, ordered = false) {
+ const tag = ordered ? 'ol' : 'ul';
+ const listItems = items.map(item => this.wrap('li', item)).join('');
+ const element = this.wrap(tag, listItems);
+ return this.addRaw(element).addEOL();
}
/**
- * Gets an http agent. This function is useful when you need an http agent that handles
- * routing through a proxy server - depending upon the url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ * Adds an HTML table to the summary buffer
+ *
+ * @param {SummaryTableCell[]} rows table rows
+ *
+ * @returns {Summary} summary instance
*/
- getAgent(serverUrl) {
- const parsedUrl = new URL(serverUrl);
- return this._getAgent(parsedUrl);
+ addTable(rows) {
+ const tableBody = rows
+ .map(row => {
+ const cells = row
+ .map(cell => {
+ if (typeof cell === 'string') {
+ return this.wrap('td', cell);
+ }
+ const { header, data, colspan, rowspan } = cell;
+ const tag = header ? 'th' : 'td';
+ const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
+ return this.wrap(tag, data, attrs);
+ })
+ .join('');
+ return this.wrap('tr', cells);
+ })
+ .join('');
+ const element = this.wrap('table', tableBody);
+ return this.addRaw(element).addEOL();
}
- getAgentDispatcher(serverUrl) {
- const parsedUrl = new URL(serverUrl);
- const proxyUrl = getProxyUrl(parsedUrl);
- const useProxy = proxyUrl && proxyUrl.hostname;
- if (!useProxy) {
- return;
- }
- return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+ /**
+ * Adds a collapsable HTML details element to the summary buffer
+ *
+ * @param {string} label text for the closed state
+ * @param {string} content collapsable content
+ *
+ * @returns {Summary} summary instance
+ */
+ addDetails(label, content) {
+ const element = this.wrap('details', this.wrap('summary', label) + content);
+ return this.addRaw(element).addEOL();
}
- _prepareRequest(method, requestUrl, headers) {
- const info = {};
- info.parsedUrl = requestUrl;
- const usingSsl = info.parsedUrl.protocol === 'https:';
- info.httpModule = usingSsl ? external_https_namespaceObject : external_http_namespaceObject;
- const defaultPort = usingSsl ? 443 : 80;
- info.options = {};
- info.options.host = info.parsedUrl.hostname;
- info.options.port = info.parsedUrl.port
- ? parseInt(info.parsedUrl.port)
- : defaultPort;
- info.options.path =
- (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
- info.options.method = method;
- info.options.headers = this._mergeHeaders(headers);
- if (this.userAgent != null) {
- info.options.headers['user-agent'] = this.userAgent;
- }
- info.options.agent = this._getAgent(info.parsedUrl);
- // gives handlers an opportunity to participate
- if (this.handlers) {
- for (const handler of this.handlers) {
- handler.prepareRequest(info.options);
- }
- }
- return info;
+ /**
+ * Adds an HTML image tag to the summary buffer
+ *
+ * @param {string} src path to the image you to embed
+ * @param {string} alt text description of the image
+ * @param {SummaryImageOptions} options (optional) addition image attributes
+ *
+ * @returns {Summary} summary instance
+ */
+ addImage(src, alt, options) {
+ const { width, height } = options || {};
+ const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
+ const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
+ return this.addRaw(element).addEOL();
}
- _mergeHeaders(headers) {
- if (this.requestOptions && this.requestOptions.headers) {
- return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
- }
- return lowercaseKeys(headers || {});
+ /**
+ * Adds an HTML section heading element
+ *
+ * @param {string} text heading text
+ * @param {number | string} [level=1] (optional) the heading level, default: 1
+ *
+ * @returns {Summary} summary instance
+ */
+ addHeading(text, level) {
+ const tag = `h${level}`;
+ const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
+ ? tag
+ : 'h1';
+ const element = this.wrap(allowedTag, text);
+ return this.addRaw(element).addEOL();
}
/**
- * Gets an existing header value or returns a default.
- * Handles converting number header values to strings since HTTP headers must be strings.
- * Note: This returns string | string[] since some headers can have multiple values.
- * For headers that must always be a single string (like Content-Type), use the
- * specialized _getExistingOrDefaultContentTypeHeader method instead.
+ * Adds an HTML thematic break (
) to the summary buffer
+ *
+ * @returns {Summary} summary instance
*/
- _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
- let clientHeader;
- if (this.requestOptions && this.requestOptions.headers) {
- const headerValue = lowercaseKeys(this.requestOptions.headers)[header];
- if (headerValue) {
- clientHeader =
- typeof headerValue === 'number' ? headerValue.toString() : headerValue;
- }
- }
- const additionalValue = additionalHeaders[header];
- if (additionalValue !== undefined) {
- return typeof additionalValue === 'number'
- ? additionalValue.toString()
- : additionalValue;
- }
- if (clientHeader !== undefined) {
- return clientHeader;
- }
- return _default;
+ addSeparator() {
+ const element = this.wrap('hr', null);
+ return this.addRaw(element).addEOL();
}
/**
- * Specialized version of _getExistingOrDefaultHeader for Content-Type header.
- * Always returns a single string (not an array) since Content-Type should be a single value.
- * Converts arrays to comma-separated strings and numbers to strings to ensure type safety.
- * This was split from _getExistingOrDefaultHeader to provide stricter typing for callers
- * that assign the result to places expecting a string (e.g., additionalHeaders[Headers.ContentType]).
+ * Adds an HTML line break (
) to the summary buffer
+ *
+ * @returns {Summary} summary instance
*/
- _getExistingOrDefaultContentTypeHeader(additionalHeaders, _default) {
- let clientHeader;
- if (this.requestOptions && this.requestOptions.headers) {
- const headerValue = lowercaseKeys(this.requestOptions.headers)[Headers.ContentType];
- if (headerValue) {
- if (typeof headerValue === 'number') {
- clientHeader = String(headerValue);
- }
- else if (Array.isArray(headerValue)) {
- clientHeader = headerValue.join(', ');
- }
- else {
- clientHeader = headerValue;
- }
- }
- }
- const additionalValue = additionalHeaders[Headers.ContentType];
- // Return the first non-undefined value, converting numbers or arrays to strings if necessary
- if (additionalValue !== undefined) {
- if (typeof additionalValue === 'number') {
- return String(additionalValue);
- }
- else if (Array.isArray(additionalValue)) {
- return additionalValue.join(', ');
- }
- else {
- return additionalValue;
- }
- }
- if (clientHeader !== undefined) {
- return clientHeader;
- }
- return _default;
+ addBreak() {
+ const element = this.wrap('br', null);
+ return this.addRaw(element).addEOL();
}
- _getAgent(parsedUrl) {
- let agent;
- const proxyUrl = getProxyUrl(parsedUrl);
- const useProxy = proxyUrl && proxyUrl.hostname;
- if (this._keepAlive && useProxy) {
- agent = this._proxyAgent;
- }
- if (!useProxy) {
- agent = this._agent;
- }
- // if agent is already assigned use that agent.
- if (agent) {
- return agent;
- }
- const usingSsl = parsedUrl.protocol === 'https:';
- let maxSockets = 100;
- if (this.requestOptions) {
- maxSockets = this.requestOptions.maxSockets || external_http_.globalAgent.maxSockets;
- }
- // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.
- if (proxyUrl && proxyUrl.hostname) {
- const agentOptions = {
- maxSockets,
- keepAlive: this._keepAlive,
- proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
- proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
- })), { host: proxyUrl.hostname, port: proxyUrl.port })
- };
- let tunnelAgent;
- const overHttps = proxyUrl.protocol === 'https:';
- if (usingSsl) {
- tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
- }
- else {
- tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
- }
- agent = tunnelAgent(agentOptions);
- this._proxyAgent = agent;
- }
- // if tunneling agent isn't assigned create a new agent
- if (!agent) {
- const options = { keepAlive: this._keepAlive, maxSockets };
- agent = usingSsl ? new external_https_.Agent(options) : new external_http_.Agent(options);
- this._agent = agent;
- }
- if (usingSsl && this._ignoreSslError) {
- // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
- // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
- // we have to cast it to any and change it directly
- agent.options = Object.assign(agent.options || {}, {
- rejectUnauthorized: false
- });
- }
- return agent;
- }
- _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
- let proxyAgent;
- if (this._keepAlive) {
- proxyAgent = this._proxyAgentDispatcher;
- }
- // if agent is already assigned use that agent.
- if (proxyAgent) {
- return proxyAgent;
- }
- const usingSsl = parsedUrl.protocol === 'https:';
- proxyAgent = new undici/* ProxyAgent */.kT(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
- token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}`
- })));
- this._proxyAgentDispatcher = proxyAgent;
- if (usingSsl && this._ignoreSslError) {
- // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
- // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
- // we have to cast it to any and change it directly
- proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
- rejectUnauthorized: false
- });
- }
- return proxyAgent;
- }
- _getUserAgentWithOrchestrationId(userAgent) {
- const baseUserAgent = userAgent || 'actions/http-client';
- const orchId = process.env['ACTIONS_ORCHESTRATION_ID'];
- if (orchId) {
- // Sanitize the orchestration ID to ensure it contains only valid characters
- // Valid characters: 0-9, a-z, _, -, .
- const sanitizedId = orchId.replace(/[^a-z0-9_.-]/gi, '_');
- return `${baseUserAgent} actions_orchestration_id/${sanitizedId}`;
- }
- return baseUserAgent;
- }
- _performExponentialBackoff(retryNumber) {
- return __awaiter(this, void 0, void 0, function* () {
- retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
- const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
- return new Promise(resolve => setTimeout(() => resolve(), ms));
- });
+ /**
+ * Adds an HTML blockquote to the summary buffer
+ *
+ * @param {string} text quote text
+ * @param {string} cite (optional) citation url
+ *
+ * @returns {Summary} summary instance
+ */
+ addQuote(text, cite) {
+ const attrs = Object.assign({}, (cite && { cite }));
+ const element = this.wrap('blockquote', text, attrs);
+ return this.addRaw(element).addEOL();
}
- _processResponse(res, options) {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
- const statusCode = res.message.statusCode || 0;
- const response = {
- statusCode,
- result: null,
- headers: {}
- };
- // not found leads to null obj returned
- if (statusCode === HttpCodes.NotFound) {
- resolve(response);
- }
- // get the result from the body
- function dateTimeDeserializer(key, value) {
- if (typeof value === 'string') {
- const a = new Date(value);
- if (!isNaN(a.valueOf())) {
- return a;
- }
- }
- return value;
- }
- let obj;
- let contents;
- try {
- contents = yield res.readBody();
- if (contents && contents.length > 0) {
- if (options && options.deserializeDates) {
- obj = JSON.parse(contents, dateTimeDeserializer);
- }
- else {
- obj = JSON.parse(contents);
- }
- response.result = obj;
- }
- response.headers = res.message.headers;
- }
- catch (err) {
- // Invalid resource (contents not json); leaving result obj null
- }
- // note that 3xx redirects are handled by the http layer.
- if (statusCode > 299) {
- let msg;
- // if exception/error in body, attempt to get better error
- if (obj && obj.message) {
- msg = obj.message;
- }
- else if (contents && contents.length > 0) {
- // it may be the case that the exception is in the body message as string
- msg = contents;
- }
- else {
- msg = `Failed request: (${statusCode})`;
- }
- const err = new HttpClientError(msg, statusCode);
- err.result = response.result;
- reject(err);
- }
- else {
- resolve(response);
- }
- }));
- });
+ /**
+ * Adds an HTML anchor tag to the summary buffer
+ *
+ * @param {string} text link text/content
+ * @param {string} href hyperlink
+ *
+ * @returns {Summary} summary instance
+ */
+ addLink(text, href) {
+ const element = this.wrap('a', text, { href });
+ return this.addRaw(element).addEOL();
}
}
-const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
-//# sourceMappingURL=index.js.map
-;// CONCATENATED MODULE: ./node_modules/@actions/http-client/lib/auth.js
-var auth_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+const _summary = new Summary();
+/**
+ * @deprecated use `core.summary`
+ */
+const markdownSummary = (/* unused pure expression or super */ null && (_summary));
+const summary = _summary;
+//# sourceMappingURL=summary.js.map
+;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/path-utils.js
+
+/**
+ * toPosixPath converts the given path to the posix form. On Windows, \\ will be
+ * replaced with /.
+ *
+ * @param pth. Path to transform.
+ * @return string Posix path.
+ */
+function toPosixPath(pth) {
+ return pth.replace(/[\\]/g, '/');
+}
+/**
+ * toWin32Path converts the given path to the win32 form. On Linux, / will be
+ * replaced with \\.
+ *
+ * @param pth. Path to transform.
+ * @return string Win32 path.
+ */
+function toWin32Path(pth) {
+ return pth.replace(/[/]/g, '\\');
+}
+/**
+ * toPlatformPath converts the given path to a platform-specific path. It does
+ * this by replacing instances of / and \ with the platform-specific path
+ * separator.
+ *
+ * @param pth The path to platformize.
+ * @return string The platform-specific path.
+ */
+function toPlatformPath(pth) {
+ return pth.replace(/[/\\]/g, path.sep);
+}
+//# sourceMappingURL=path-utils.js.map
+// EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js + 4 modules
+var lib_exec = __nccwpck_require__(382);
+;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/platform.js
+var platform_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -32940,74 +32561,63 @@ var auth_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arg
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
-class BasicCredentialHandler {
- constructor(username, password) {
- this.username = username;
- this.password = password;
- }
- prepareRequest(options) {
- if (!options.headers) {
- throw Error('The request has no headers');
- }
- options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
- }
- // This handler cannot handle 401
- canHandleAuthentication() {
- return false;
- }
- handleAuthentication() {
- return auth_awaiter(this, void 0, void 0, function* () {
- throw new Error('not implemented');
- });
- }
-}
-class auth_BearerCredentialHandler {
- constructor(token) {
- this.token = token;
- }
- // currently implements pre-authorization
- // TODO: support preAuth = false where it hooks on 401
- prepareRequest(options) {
- if (!options.headers) {
- throw Error('The request has no headers');
- }
- options.headers['Authorization'] = `Bearer ${this.token}`;
- }
- // This handler cannot handle 401
- canHandleAuthentication() {
- return false;
- }
- handleAuthentication() {
- return auth_awaiter(this, void 0, void 0, function* () {
- throw new Error('not implemented');
- });
- }
-}
-class PersonalAccessTokenCredentialHandler {
- constructor(token) {
- this.token = token;
- }
- // currently implements pre-authorization
- // TODO: support preAuth = false where it hooks on 401
- prepareRequest(options) {
- if (!options.headers) {
- throw Error('The request has no headers');
- }
- options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
- }
- // This handler cannot handle 401
- canHandleAuthentication() {
- return false;
- }
- handleAuthentication() {
- return auth_awaiter(this, void 0, void 0, function* () {
- throw new Error('not implemented');
- });
- }
+
+
+const getWindowsInfo = () => platform_awaiter(void 0, void 0, void 0, function* () {
+ const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, {
+ silent: true
+ });
+ const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, {
+ silent: true
+ });
+ return {
+ name: name.trim(),
+ version: version.trim()
+ };
+});
+const getMacOsInfo = () => platform_awaiter(void 0, void 0, void 0, function* () {
+ var _a, _b, _c, _d;
+ const { stdout } = yield exec.getExecOutput('sw_vers', undefined, {
+ silent: true
+ });
+ const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : '';
+ const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : '';
+ return {
+ name,
+ version
+ };
+});
+const getLinuxInfo = () => platform_awaiter(void 0, void 0, void 0, function* () {
+ const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], {
+ silent: true
+ });
+ const [name, version] = stdout.trim().split('\n');
+ return {
+ name,
+ version
+ };
+});
+const platform = external_os_.platform();
+const arch = external_os_.arch();
+const isWindows = platform === 'win32';
+const isMacOS = platform === 'darwin';
+const isLinux = platform === 'linux';
+function getDetails() {
+ return platform_awaiter(this, void 0, void 0, function* () {
+ return Object.assign(Object.assign({}, (yield (isWindows
+ ? getWindowsInfo()
+ : isMacOS
+ ? getMacOsInfo()
+ : getLinuxInfo()))), { platform,
+ arch,
+ isWindows,
+ isMacOS,
+ isLinux });
+ });
}
-//# sourceMappingURL=auth.js.map
-;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/oidc-utils.js
-var oidc_utils_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+//# sourceMappingURL=platform.js.map
+;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/core.js
+var core_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -33019,392 +32629,340 @@ var oidc_utils_awaiter = (undefined && undefined.__awaiter) || function (thisArg
-class oidc_utils_OidcClient {
- static createHttpClient(allowRetry = true, maxRetry = 10) {
- const requestOptions = {
- allowRetries: allowRetry,
- maxRetries: maxRetry
- };
- return new HttpClient('actions/oidc-client', [new BearerCredentialHandler(oidc_utils_OidcClient.getRequestToken())], requestOptions);
- }
- static getRequestToken() {
- const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
- if (!token) {
- throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
- }
- return token;
- }
- static getIDTokenUrl() {
- const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
- if (!runtimeUrl) {
- throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
- }
- return runtimeUrl;
- }
- static getCall(id_token_url) {
- return oidc_utils_awaiter(this, void 0, void 0, function* () {
- var _a;
- const httpclient = oidc_utils_OidcClient.createHttpClient();
- const res = yield httpclient
- .getJson(id_token_url)
- .catch(error => {
- throw new Error(`Failed to get ID Token. \n
- Error Code : ${error.statusCode}\n
- Error Message: ${error.message}`);
- });
- const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
- if (!id_token) {
- throw new Error('Response json body do not have ID Token field');
- }
- return id_token;
- });
- }
- static getIDToken(audience) {
- return oidc_utils_awaiter(this, void 0, void 0, function* () {
- try {
- // New ID Token is requested from action service
- let id_token_url = oidc_utils_OidcClient.getIDTokenUrl();
- if (audience) {
- const encodedAudience = encodeURIComponent(audience);
- id_token_url = `${id_token_url}&audience=${encodedAudience}`;
- }
- debug(`ID token url is ${id_token_url}`);
- const id_token = yield oidc_utils_OidcClient.getCall(id_token_url);
- setSecret(id_token);
- return id_token;
- }
- catch (error) {
- throw new Error(`Error message: ${error.message}`);
- }
- });
- }
-}
-//# sourceMappingURL=oidc-utils.js.map
-;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/summary.js
-var summary_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-const { access, appendFile, writeFile } = external_fs_namespaceObject.promises;
-const SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
-const SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
-class Summary {
- constructor() {
- this._buffer = '';
- }
- /**
- * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
- * Also checks r/w permissions.
- *
- * @returns step summary file path
- */
- filePath() {
- return summary_awaiter(this, void 0, void 0, function* () {
- if (this._filePath) {
- return this._filePath;
- }
- const pathFromEnv = process.env[SUMMARY_ENV_VAR];
- if (!pathFromEnv) {
- throw new Error(`Unable to find environment variable for $${SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
- }
- try {
- yield access(pathFromEnv, external_fs_namespaceObject.constants.R_OK | external_fs_namespaceObject.constants.W_OK);
- }
- catch (_a) {
- throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
- }
- this._filePath = pathFromEnv;
- return this._filePath;
- });
- }
- /**
- * Wraps content in an HTML tag, adding any HTML attributes
- *
- * @param {string} tag HTML tag to wrap
- * @param {string | null} content content within the tag
- * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
- *
- * @returns {string} content wrapped in HTML element
- */
- wrap(tag, content, attrs = {}) {
- const htmlAttrs = Object.entries(attrs)
- .map(([key, value]) => ` ${key}="${value}"`)
- .join('');
- if (!content) {
- return `<${tag}${htmlAttrs}>`;
- }
- return `<${tag}${htmlAttrs}>${content}${tag}>`;
- }
- /**
- * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
- *
- * @param {SummaryWriteOptions} [options] (optional) options for write operation
- *
- * @returns {Promise} summary instance
- */
- write(options) {
- return summary_awaiter(this, void 0, void 0, function* () {
- const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
- const filePath = yield this.filePath();
- const writeFunc = overwrite ? writeFile : appendFile;
- yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
- return this.emptyBuffer();
- });
- }
- /**
- * Clears the summary buffer and wipes the summary file
- *
- * @returns {Summary} summary instance
- */
- clear() {
- return summary_awaiter(this, void 0, void 0, function* () {
- return this.emptyBuffer().write({ overwrite: true });
- });
- }
+
+/**
+ * The code to exit an action
+ */
+var ExitCode;
+(function (ExitCode) {
/**
- * Returns the current summary buffer as a string
- *
- * @returns {string} string of summary buffer
+ * A code indicating that the action was successful
*/
- stringify() {
- return this._buffer;
- }
+ ExitCode[ExitCode["Success"] = 0] = "Success";
/**
- * If the summary buffer is empty
- *
- * @returns {boolen} true if the buffer is empty
+ * A code indicating that the action was a failure
*/
- isEmptyBuffer() {
- return this._buffer.length === 0;
- }
- /**
- * Resets the summary buffer without writing to summary file
- *
- * @returns {Summary} summary instance
- */
- emptyBuffer() {
- this._buffer = '';
- return this;
- }
- /**
- * Adds raw text to the summary buffer
- *
- * @param {string} text content to add
- * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
- *
- * @returns {Summary} summary instance
- */
- addRaw(text, addEOL = false) {
- this._buffer += text;
- return addEOL ? this.addEOL() : this;
- }
- /**
- * Adds the operating system-specific end-of-line marker to the buffer
- *
- * @returns {Summary} summary instance
- */
- addEOL() {
- return this.addRaw(external_os_namespaceObject.EOL);
- }
- /**
- * Adds an HTML codeblock to the summary buffer
- *
- * @param {string} code content to render within fenced code block
- * @param {string} lang (optional) language to syntax highlight code
- *
- * @returns {Summary} summary instance
- */
- addCodeBlock(code, lang) {
- const attrs = Object.assign({}, (lang && { lang }));
- const element = this.wrap('pre', this.wrap('code', code), attrs);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML list to the summary buffer
- *
- * @param {string[]} items list of items to render
- * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
- *
- * @returns {Summary} summary instance
- */
- addList(items, ordered = false) {
- const tag = ordered ? 'ol' : 'ul';
- const listItems = items.map(item => this.wrap('li', item)).join('');
- const element = this.wrap(tag, listItems);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML table to the summary buffer
- *
- * @param {SummaryTableCell[]} rows table rows
- *
- * @returns {Summary} summary instance
- */
- addTable(rows) {
- const tableBody = rows
- .map(row => {
- const cells = row
- .map(cell => {
- if (typeof cell === 'string') {
- return this.wrap('td', cell);
- }
- const { header, data, colspan, rowspan } = cell;
- const tag = header ? 'th' : 'td';
- const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
- return this.wrap(tag, data, attrs);
- })
- .join('');
- return this.wrap('tr', cells);
- })
- .join('');
- const element = this.wrap('table', tableBody);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds a collapsable HTML details element to the summary buffer
- *
- * @param {string} label text for the closed state
- * @param {string} content collapsable content
- *
- * @returns {Summary} summary instance
- */
- addDetails(label, content) {
- const element = this.wrap('details', this.wrap('summary', label) + content);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML image tag to the summary buffer
- *
- * @param {string} src path to the image you to embed
- * @param {string} alt text description of the image
- * @param {SummaryImageOptions} options (optional) addition image attributes
- *
- * @returns {Summary} summary instance
- */
- addImage(src, alt, options) {
- const { width, height } = options || {};
- const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
- const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML section heading element
- *
- * @param {string} text heading text
- * @param {number | string} [level=1] (optional) the heading level, default: 1
- *
- * @returns {Summary} summary instance
- */
- addHeading(text, level) {
- const tag = `h${level}`;
- const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
- ? tag
- : 'h1';
- const element = this.wrap(allowedTag, text);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML thematic break (
) to the summary buffer
- *
- * @returns {Summary} summary instance
- */
- addSeparator() {
- const element = this.wrap('hr', null);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML line break (
) to the summary buffer
- *
- * @returns {Summary} summary instance
- */
- addBreak() {
- const element = this.wrap('br', null);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML blockquote to the summary buffer
- *
- * @param {string} text quote text
- * @param {string} cite (optional) citation url
- *
- * @returns {Summary} summary instance
- */
- addQuote(text, cite) {
- const attrs = Object.assign({}, (cite && { cite }));
- const element = this.wrap('blockquote', text, attrs);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML anchor tag to the summary buffer
- *
- * @param {string} text link text/content
- * @param {string} href hyperlink
- *
- * @returns {Summary} summary instance
- */
- addLink(text, href) {
- const element = this.wrap('a', text, { href });
- return this.addRaw(element).addEOL();
+ ExitCode[ExitCode["Failure"] = 1] = "Failure";
+})(ExitCode || (ExitCode = {}));
+//-----------------------------------------------------------------------
+// Variables
+//-----------------------------------------------------------------------
+/**
+ * Sets env variable for this action and future actions in the job
+ * @param name the name of the variable to set
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function exportVariable(name, val) {
+ const convertedVal = toCommandValue(val);
+ process.env[name] = convertedVal;
+ const filePath = process.env['GITHUB_ENV'] || '';
+ if (filePath) {
+ return issueFileCommand('ENV', prepareKeyValueMessage(name, val));
}
+ issueCommand('set-env', { name }, convertedVal);
}
-const _summary = new Summary();
/**
- * @deprecated use `core.summary`
+ * Registers a secret which will get masked from logs
+ *
+ * @param secret - Value of the secret to be masked
+ * @remarks
+ * This function instructs the Actions runner to mask the specified value in any
+ * logs produced during the workflow run. Once registered, the secret value will
+ * be replaced with asterisks (***) whenever it appears in console output, logs,
+ * or error messages.
+ *
+ * This is useful for protecting sensitive information such as:
+ * - API keys
+ * - Access tokens
+ * - Authentication credentials
+ * - URL parameters containing signatures (SAS tokens)
+ *
+ * Note that masking only affects future logs; any previous appearances of the
+ * secret in logs before calling this function will remain unmasked.
+ *
+ * @example
+ * ```typescript
+ * // Register an API token as a secret
+ * const apiToken = "abc123xyz456";
+ * setSecret(apiToken);
+ *
+ * // Now any logs containing this value will show *** instead
+ * console.log(`Using token: ${apiToken}`); // Outputs: "Using token: ***"
+ * ```
*/
-const markdownSummary = (/* unused pure expression or super */ null && (_summary));
-const summary = _summary;
-//# sourceMappingURL=summary.js.map
-;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/path-utils.js
-
+function core_setSecret(secret) {
+ command_issueCommand('add-mask', {}, secret);
+}
/**
- * toPosixPath converts the given path to the posix form. On Windows, \\ will be
- * replaced with /.
- *
- * @param pth. Path to transform.
- * @return string Posix path.
+ * Prepends inputPath to the PATH (for this action and future actions)
+ * @param inputPath
*/
-function toPosixPath(pth) {
- return pth.replace(/[\\]/g, '/');
+function addPath(inputPath) {
+ const filePath = process.env['GITHUB_PATH'] || '';
+ if (filePath) {
+ file_command_issueFileCommand('PATH', inputPath);
+ }
+ else {
+ command_issueCommand('add-path', {}, inputPath);
+ }
+ process.env['PATH'] = `${inputPath}${external_path_.delimiter}${process.env['PATH']}`;
}
/**
- * toWin32Path converts the given path to the win32 form. On Linux, / will be
- * replaced with \\.
+ * Gets the value of an input.
+ * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
+ * Returns an empty string if the value is not defined.
*
- * @param pth. Path to transform.
- * @return string Win32 path.
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string
*/
-function toWin32Path(pth) {
- return pth.replace(/[/]/g, '\\');
+function getInput(name, options) {
+ const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
+ if (options && options.required && !val) {
+ throw new Error(`Input required and not supplied: ${name}`);
+ }
+ if (options && options.trimWhitespace === false) {
+ return val;
+ }
+ return val.trim();
}
/**
- * toPlatformPath converts the given path to a platform-specific path. It does
- * this by replacing instances of / and \ with the platform-specific path
- * separator.
+ * Gets the values of an multiline input. Each value is also trimmed.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string[]
*
- * @param pth The path to platformize.
- * @return string The platform-specific path.
*/
-function toPlatformPath(pth) {
- return pth.replace(/[/\\]/g, path.sep);
-}
-//# sourceMappingURL=path-utils.js.map
-// EXTERNAL MODULE: external "string_decoder"
-var external_string_decoder_ = __nccwpck_require__(3193);
+function getMultilineInput(name, options) {
+ const inputs = getInput(name, options)
+ .split('\n')
+ .filter(x => x !== '');
+ if (options && options.trimWhitespace === false) {
+ return inputs;
+ }
+ return inputs.map(input => input.trim());
+}
+/**
+ * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
+ * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
+ * The return value is also in boolean type.
+ * ref: https://yaml.org/spec/1.2/spec.html#id2804923
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns boolean
+ */
+function getBooleanInput(name, options) {
+ const trueValue = ['true', 'True', 'TRUE'];
+ const falseValue = ['false', 'False', 'FALSE'];
+ const val = getInput(name, options);
+ if (trueValue.includes(val))
+ return true;
+ if (falseValue.includes(val))
+ return false;
+ throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
+ `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
+}
+/**
+ * Sets the value of an output.
+ *
+ * @param name name of the output to set
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function setOutput(name, value) {
+ const filePath = process.env['GITHUB_OUTPUT'] || '';
+ if (filePath) {
+ return file_command_issueFileCommand('OUTPUT', file_command_prepareKeyValueMessage(name, value));
+ }
+ process.stdout.write(external_os_.EOL);
+ command_issueCommand('set-output', { name }, utils_toCommandValue(value));
+}
+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+function setCommandEcho(enabled) {
+ issue('echo', enabled ? 'on' : 'off');
+}
+//-----------------------------------------------------------------------
+// Results
+//-----------------------------------------------------------------------
+/**
+ * Sets the action status to failed.
+ * When the action exits it will be with an exit code of 1
+ * @param message add error issue message
+ */
+function setFailed(message) {
+ process.exitCode = ExitCode.Failure;
+ error(message);
+}
+//-----------------------------------------------------------------------
+// Logging Commands
+//-----------------------------------------------------------------------
+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+function isDebug() {
+ return process.env['RUNNER_DEBUG'] === '1';
+}
+/**
+ * Writes debug message to user log
+ * @param message debug message
+ */
+function core_debug(message) {
+ command_issueCommand('debug', {}, message);
+}
+/**
+ * Adds an error issue
+ * @param message error issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function error(message, properties = {}) {
+ command_issueCommand('error', utils_toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+/**
+ * Adds a warning issue
+ * @param message warning issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function warning(message, properties = {}) {
+ command_issueCommand('warning', utils_toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+/**
+ * Adds a notice issue
+ * @param message notice issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function notice(message, properties = {}) {
+ issueCommand('notice', toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+/**
+ * Writes info to log with console.log.
+ * @param message info message
+ */
+function info(message) {
+ process.stdout.write(message + external_os_.EOL);
+}
+/**
+ * Begin an output group.
+ *
+ * Output until the next `groupEnd` will be foldable in this group
+ *
+ * @param name The name of the output group
+ */
+function startGroup(name) {
+ issue('group', name);
+}
+/**
+ * End an output group.
+ */
+function endGroup() {
+ issue('endgroup');
+}
+/**
+ * Wrap an asynchronous function call in a group.
+ *
+ * Returns the same type as the function itself.
+ *
+ * @param name The name of the group
+ * @param fn The function to wrap in the group
+ */
+function group(name, fn) {
+ return core_awaiter(this, void 0, void 0, function* () {
+ startGroup(name);
+ let result;
+ try {
+ result = yield fn();
+ }
+ finally {
+ endGroup();
+ }
+ return result;
+ });
+}
+//-----------------------------------------------------------------------
+// Wrapper action state
+//-----------------------------------------------------------------------
+/**
+ * Saves state for current action, the state can only be retrieved by this action's post job execution.
+ *
+ * @param name name of the state to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function saveState(name, value) {
+ const filePath = process.env['GITHUB_STATE'] || '';
+ if (filePath) {
+ return issueFileCommand('STATE', prepareKeyValueMessage(name, value));
+ }
+ issueCommand('save-state', { name }, toCommandValue(value));
+}
+/**
+ * Gets the value of an state set by this action's main execution.
+ *
+ * @param name name of the state to get
+ * @returns string
+ */
+function getState(name) {
+ return process.env[`STATE_${name}`] || '';
+}
+function getIDToken(aud) {
+ return core_awaiter(this, void 0, void 0, function* () {
+ return yield OidcClient.getIDToken(aud);
+ });
+}
+/**
+ * Summary exports
+ */
+
+/**
+ * @deprecated use core.summary
+ */
+
+/**
+ * Path exports
+ */
+
+/**
+ * Platform utilities exports
+ */
+
+//# sourceMappingURL=core.js.map
+
+/***/ }),
+
+/***/ 382:
+/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
+
+
+// EXPORTS
+__nccwpck_require__.d(__webpack_exports__, {
+ m: () => (/* binding */ exec),
+ H: () => (/* binding */ getExecOutput)
+});
+
+// EXTERNAL MODULE: external "string_decoder"
+var external_string_decoder_ = __nccwpck_require__(3193);
+// EXTERNAL MODULE: external "os"
+var external_os_ = __nccwpck_require__(857);
// EXTERNAL MODULE: external "events"
var external_events_ = __nccwpck_require__(4434);
-;// CONCATENATED MODULE: external "child_process"
-const external_child_process_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("child_process");
+// EXTERNAL MODULE: external "child_process"
+var external_child_process_ = __nccwpck_require__(5317);
+// EXTERNAL MODULE: external "path"
+var external_path_ = __nccwpck_require__(6928);
// EXTERNAL MODULE: external "assert"
var external_assert_ = __nccwpck_require__(2613);
+// EXTERNAL MODULE: external "fs"
+var external_fs_ = __nccwpck_require__(9896);
;// CONCATENATED MODULE: ./node_modules/@actions/exec/node_modules/@actions/io/lib/io-util.js
-var io_util_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -33415,7 +32973,7 @@ var io_util_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _
};
-const { chmod, copyFile, lstat, mkdir, open: io_util_open, readdir, rename, rm, rmdir, stat, symlink, unlink } = external_fs_namespaceObject.promises;
+const { chmod, copyFile, lstat, mkdir, open: io_util_open, readdir, rename, rm, rmdir, stat, symlink, unlink } = external_fs_.promises;
// export const {open} = 'fs'
const IS_WINDOWS = process.platform === 'win32';
/**
@@ -33430,7 +32988,7 @@ const IS_WINDOWS = process.platform === 'win32';
* backslash to all junction results on Windows.
*/
function readlink(fsPath) {
- return io_util_awaiter(this, void 0, void 0, function* () {
+ return __awaiter(this, void 0, void 0, function* () {
const result = yield fs.promises.readlink(fsPath);
// On Windows, restore Node 20 behavior: add trailing backslash to all results
// since junctions on Windows are always directory links
@@ -33442,9 +33000,9 @@ function readlink(fsPath) {
}
// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
const UV_FS_O_EXLOCK = 0x10000000;
-const READONLY = external_fs_namespaceObject.constants.O_RDONLY;
+const READONLY = external_fs_.constants.O_RDONLY;
function exists(fsPath) {
- return io_util_awaiter(this, void 0, void 0, function* () {
+ return __awaiter(this, void 0, void 0, function* () {
try {
yield stat(fsPath);
}
@@ -33458,7 +33016,7 @@ function exists(fsPath) {
});
}
function isDirectory(fsPath_1) {
- return io_util_awaiter(this, arguments, void 0, function* (fsPath, useStat = false) {
+ return __awaiter(this, arguments, void 0, function* (fsPath, useStat = false) {
const stats = useStat ? yield stat(fsPath) : yield lstat(fsPath);
return stats.isDirectory();
});
@@ -33485,7 +33043,7 @@ function isRooted(p) {
* @return if file exists and is executable, returns the file path. otherwise empty string.
*/
function tryGetExecutablePath(filePath, extensions) {
- return io_util_awaiter(this, void 0, void 0, function* () {
+ return __awaiter(this, void 0, void 0, function* () {
let stats = undefined;
try {
// test file exists
@@ -33604,7 +33162,7 @@ var io_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _argum
* @param dest destination path
* @param options optional. See CopyOptions.
*/
-function io_cp(source_1, dest_1) {
+function cp(source_1, dest_1) {
return io_awaiter(this, arguments, void 0, function* (source, dest, options = {}) {
const { force, recursive, copySourceDirectory } = readCopyOptions(options);
const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
@@ -33935,13 +33493,13 @@ class ToolRunner extends external_events_.EventEmitter {
_processLineBuffer(data, strBuffer, onLine) {
try {
let s = strBuffer + data.toString();
- let n = s.indexOf(external_os_namespaceObject.EOL);
+ let n = s.indexOf(external_os_.EOL);
while (n > -1) {
const line = s.substring(0, n);
onLine(line);
// the rest of the string ...
- s = s.substring(n + external_os_namespaceObject.EOL.length);
- n = s.indexOf(external_os_namespaceObject.EOL);
+ s = s.substring(n + external_os_.EOL.length);
+ n = s.indexOf(external_os_.EOL);
}
return s;
}
@@ -34232,7 +33790,7 @@ class ToolRunner extends external_events_.EventEmitter {
}
const optionsNonNull = this._cloneExecOptions(this.options);
if (!optionsNonNull.silent && optionsNonNull.outStream) {
- optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + external_os_namespaceObject.EOL);
+ optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + external_os_.EOL);
}
const state = new ExecState(optionsNonNull, this.toolPath);
state.on('debug', (message) => {
@@ -34242,7 +33800,7 @@ class ToolRunner extends external_events_.EventEmitter {
return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`));
}
const fileName = this._getSpawnFileName();
- const cp = external_child_process_namespaceObject.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
+ const cp = external_child_process_.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
let stdbuffer = '';
if (cp.stdout) {
cp.stdout.on('data', (data) => {
@@ -34467,7 +34025,7 @@ var exec_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arg
* @param options optional exec options. See ExecOptions
* @returns Promise exit code
*/
-function exec_exec(commandLine, args, options) {
+function exec(commandLine, args, options) {
return exec_awaiter(this, void 0, void 0, function* () {
const commandArgs = argStringToArray(commandLine);
if (commandArgs.length === 0) {
@@ -34513,7 +34071,7 @@ function getExecOutput(commandLine, args, options) {
}
};
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
- const exitCode = yield exec_exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
+ const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
//flush any remaining characters
stdout += stdoutDecoder.end();
stderr += stderrDecoder.end();
@@ -34525,73 +34083,124 @@ function getExecOutput(commandLine, args, options) {
});
}
//# sourceMappingURL=exec.js.map
-;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/platform.js
-var platform_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
+/***/ }),
-const getWindowsInfo = () => platform_awaiter(void 0, void 0, void 0, function* () {
- const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, {
- silent: true
- });
- const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, {
- silent: true
- });
- return {
- name: name.trim(),
- version: version.trim()
- };
-});
-const getMacOsInfo = () => platform_awaiter(void 0, void 0, void 0, function* () {
- var _a, _b, _c, _d;
- const { stdout } = yield exec.getExecOutput('sw_vers', undefined, {
- silent: true
- });
- const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : '';
- const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : '';
- return {
- name,
- version
- };
-});
-const getLinuxInfo = () => platform_awaiter(void 0, void 0, void 0, function* () {
- const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], {
- silent: true
- });
- const [name, version] = stdout.trim().split('\n');
- return {
- name,
- version
- };
+/***/ 4942:
+/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => {
+
+
+// EXPORTS
+__nccwpck_require__.d(__webpack_exports__, {
+ Qq: () => (/* binding */ HttpClient)
});
-const platform = external_os_namespaceObject.platform();
-const arch = external_os_namespaceObject.arch();
-const isWindows = platform === 'win32';
-const isMacOS = platform === 'darwin';
-const isLinux = platform === 'linux';
-function getDetails() {
- return platform_awaiter(this, void 0, void 0, function* () {
- return Object.assign(Object.assign({}, (yield (isWindows
- ? getWindowsInfo()
- : isMacOS
- ? getMacOsInfo()
- : getLinuxInfo()))), { platform,
- arch,
- isWindows,
- isMacOS,
- isLinux });
- });
+
+// UNUSED EXPORTS: Headers, HttpClientError, HttpClientResponse, HttpCodes, MediaTypes, getProxyUrl, isHttps
+
+// EXTERNAL MODULE: external "http"
+var external_http_ = __nccwpck_require__(8611);
+var external_http_namespaceObject = /*#__PURE__*/__nccwpck_require__.t(external_http_, 2);
+// EXTERNAL MODULE: external "https"
+var external_https_ = __nccwpck_require__(5692);
+var external_https_namespaceObject = /*#__PURE__*/__nccwpck_require__.t(external_https_, 2);
+;// CONCATENATED MODULE: ./node_modules/@actions/http-client/lib/proxy.js
+function getProxyUrl(reqUrl) {
+ const usingSsl = reqUrl.protocol === 'https:';
+ if (checkBypass(reqUrl)) {
+ return undefined;
+ }
+ const proxyVar = (() => {
+ if (usingSsl) {
+ return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
+ }
+ else {
+ return process.env['http_proxy'] || process.env['HTTP_PROXY'];
+ }
+ })();
+ if (proxyVar) {
+ try {
+ return new DecodedURL(proxyVar);
+ }
+ catch (_a) {
+ if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+ return new DecodedURL(`http://${proxyVar}`);
+ }
+ }
+ else {
+ return undefined;
+ }
}
-//# sourceMappingURL=platform.js.map
-;// CONCATENATED MODULE: ./node_modules/@actions/core/lib/core.js
-var core_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+function checkBypass(reqUrl) {
+ if (!reqUrl.hostname) {
+ return false;
+ }
+ const reqHost = reqUrl.hostname;
+ if (isLoopbackAddress(reqHost)) {
+ return true;
+ }
+ const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
+ if (!noProxy) {
+ return false;
+ }
+ // Determine the request port
+ let reqPort;
+ if (reqUrl.port) {
+ reqPort = Number(reqUrl.port);
+ }
+ else if (reqUrl.protocol === 'http:') {
+ reqPort = 80;
+ }
+ else if (reqUrl.protocol === 'https:') {
+ reqPort = 443;
+ }
+ // Format the request hostname and hostname with port
+ const upperReqHosts = [reqUrl.hostname.toUpperCase()];
+ if (typeof reqPort === 'number') {
+ upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+ }
+ // Compare request host against noproxy
+ for (const upperNoProxyItem of noProxy
+ .split(',')
+ .map(x => x.trim().toUpperCase())
+ .filter(x => x)) {
+ if (upperNoProxyItem === '*' ||
+ upperReqHosts.some(x => x === upperNoProxyItem ||
+ x.endsWith(`.${upperNoProxyItem}`) ||
+ (upperNoProxyItem.startsWith('.') &&
+ x.endsWith(`${upperNoProxyItem}`)))) {
+ return true;
+ }
+ }
+ return false;
+}
+function isLoopbackAddress(host) {
+ const hostLower = host.toLowerCase();
+ return (hostLower === 'localhost' ||
+ hostLower.startsWith('127.') ||
+ hostLower.startsWith('[::1]') ||
+ hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
+class DecodedURL extends URL {
+ constructor(url, base) {
+ super(url, base);
+ this._decodedUsername = decodeURIComponent(super.username);
+ this._decodedPassword = decodeURIComponent(super.password);
+ }
+ get username() {
+ return this._decodedUsername;
+ }
+ get password() {
+ return this._decodedPassword;
+ }
+}
+//# sourceMappingURL=proxy.js.map
+// EXTERNAL MODULE: ./node_modules/tunnel/index.js
+var tunnel = __nccwpck_require__(770);
+// EXTERNAL MODULE: ./node_modules/undici/index.js
+var undici = __nccwpck_require__(6752);
+;// CONCATENATED MODULE: ./node_modules/@actions/http-client/lib/index.js
+/* eslint-disable @typescript-eslint/no-explicit-any */
+var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -34605,309 +34214,896 @@ var core_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arg
-
-/**
- * The code to exit an action
- */
-var ExitCode;
-(function (ExitCode) {
- /**
- * A code indicating that the action was successful
- */
- ExitCode[ExitCode["Success"] = 0] = "Success";
- /**
- * A code indicating that the action was a failure
- */
- ExitCode[ExitCode["Failure"] = 1] = "Failure";
-})(ExitCode || (ExitCode = {}));
-//-----------------------------------------------------------------------
-// Variables
-//-----------------------------------------------------------------------
-/**
- * Sets env variable for this action and future actions in the job
- * @param name the name of the variable to set
- * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function exportVariable(name, val) {
- const convertedVal = toCommandValue(val);
- process.env[name] = convertedVal;
- const filePath = process.env['GITHUB_ENV'] || '';
- if (filePath) {
- return issueFileCommand('ENV', prepareKeyValueMessage(name, val));
- }
- issueCommand('set-env', { name }, convertedVal);
-}
+var HttpCodes;
+(function (HttpCodes) {
+ HttpCodes[HttpCodes["OK"] = 200] = "OK";
+ HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+ HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+ HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+ HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+ HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+ HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+ HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+ HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+ HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+ HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+ HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+ HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+ HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+ HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+ HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+ HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+ HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+ HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+ HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+ HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+ HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+ HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+ HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+ HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+ HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+ HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes || (HttpCodes = {}));
+var Headers;
+(function (Headers) {
+ Headers["Accept"] = "accept";
+ Headers["ContentType"] = "content-type";
+})(Headers || (Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+ MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes || (MediaTypes = {}));
/**
- * Registers a secret which will get masked from logs
- *
- * @param secret - Value of the secret to be masked
- * @remarks
- * This function instructs the Actions runner to mask the specified value in any
- * logs produced during the workflow run. Once registered, the secret value will
- * be replaced with asterisks (***) whenever it appears in console output, logs,
- * or error messages.
- *
- * This is useful for protecting sensitive information such as:
- * - API keys
- * - Access tokens
- * - Authentication credentials
- * - URL parameters containing signatures (SAS tokens)
- *
- * Note that masking only affects future logs; any previous appearances of the
- * secret in logs before calling this function will remain unmasked.
- *
- * @example
- * ```typescript
- * // Register an API token as a secret
- * const apiToken = "abc123xyz456";
- * setSecret(apiToken);
- *
- * // Now any logs containing this value will show *** instead
- * console.log(`Using token: ${apiToken}`); // Outputs: "Using token: ***"
- * ```
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
-function core_setSecret(secret) {
- command_issueCommand('add-mask', {}, secret);
+function lib_getProxyUrl(serverUrl) {
+ const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
+ return proxyUrl ? proxyUrl.href : '';
}
-/**
- * Prepends inputPath to the PATH (for this action and future actions)
- * @param inputPath
- */
-function addPath(inputPath) {
- const filePath = process.env['GITHUB_PATH'] || '';
- if (filePath) {
- file_command_issueFileCommand('PATH', inputPath);
+const HttpRedirectCodes = [
+ HttpCodes.MovedPermanently,
+ HttpCodes.ResourceMoved,
+ HttpCodes.SeeOther,
+ HttpCodes.TemporaryRedirect,
+ HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientError extends Error {
+ constructor(message, statusCode) {
+ super(message);
+ this.name = 'HttpClientError';
+ this.statusCode = statusCode;
+ Object.setPrototypeOf(this, HttpClientError.prototype);
}
- else {
- command_issueCommand('add-path', {}, inputPath);
- }
- process.env['PATH'] = `${inputPath}${external_path_.delimiter}${process.env['PATH']}`;
}
-/**
- * Gets the value of an input.
- * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
- * Returns an empty string if the value is not defined.
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns string
- */
-function getInput(name, options) {
- const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
- if (options && options.required && !val) {
- throw new Error(`Input required and not supplied: ${name}`);
- }
- if (options && options.trimWhitespace === false) {
- return val;
+class HttpClientResponse {
+ constructor(message) {
+ this.message = message;
}
- return val.trim();
-}
-/**
- * Gets the values of an multiline input. Each value is also trimmed.
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns string[]
- *
- */
-function getMultilineInput(name, options) {
- const inputs = getInput(name, options)
- .split('\n')
- .filter(x => x !== '');
- if (options && options.trimWhitespace === false) {
- return inputs;
+ readBody() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ let output = Buffer.alloc(0);
+ this.message.on('data', (chunk) => {
+ output = Buffer.concat([output, chunk]);
+ });
+ this.message.on('end', () => {
+ resolve(output.toString());
+ });
+ }));
+ });
}
- return inputs.map(input => input.trim());
-}
-/**
- * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
- * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
- * The return value is also in boolean type.
- * ref: https://yaml.org/spec/1.2/spec.html#id2804923
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns boolean
- */
-function getBooleanInput(name, options) {
- const trueValue = ['true', 'True', 'TRUE'];
- const falseValue = ['false', 'False', 'FALSE'];
- const val = getInput(name, options);
- if (trueValue.includes(val))
- return true;
- if (falseValue.includes(val))
- return false;
- throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
- `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
-}
-/**
- * Sets the value of an output.
- *
- * @param name name of the output to set
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function setOutput(name, value) {
- const filePath = process.env['GITHUB_OUTPUT'] || '';
- if (filePath) {
- return file_command_issueFileCommand('OUTPUT', file_command_prepareKeyValueMessage(name, value));
+ readBodyBuffer() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ const chunks = [];
+ this.message.on('data', (chunk) => {
+ chunks.push(chunk);
+ });
+ this.message.on('end', () => {
+ resolve(Buffer.concat(chunks));
+ });
+ }));
+ });
}
- process.stdout.write(external_os_namespaceObject.EOL);
- command_issueCommand('set-output', { name }, utils_toCommandValue(value));
-}
-/**
- * Enables or disables the echoing of commands into stdout for the rest of the step.
- * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
- *
- */
-function setCommandEcho(enabled) {
- issue('echo', enabled ? 'on' : 'off');
-}
-//-----------------------------------------------------------------------
-// Results
-//-----------------------------------------------------------------------
-/**
- * Sets the action status to failed.
- * When the action exits it will be with an exit code of 1
- * @param message add error issue message
- */
-function setFailed(message) {
- process.exitCode = ExitCode.Failure;
- error(message);
-}
-//-----------------------------------------------------------------------
-// Logging Commands
-//-----------------------------------------------------------------------
-/**
- * Gets whether Actions Step Debug is on or not
- */
-function isDebug() {
- return process.env['RUNNER_DEBUG'] === '1';
-}
-/**
- * Writes debug message to user log
- * @param message debug message
- */
-function core_debug(message) {
- command_issueCommand('debug', {}, message);
-}
-/**
- * Adds an error issue
- * @param message error issue message. Errors will be converted to string via toString()
- * @param properties optional properties to add to the annotation.
- */
-function error(message, properties = {}) {
- command_issueCommand('error', utils_toCommandProperties(properties), message instanceof Error ? message.toString() : message);
-}
-/**
- * Adds a warning issue
- * @param message warning issue message. Errors will be converted to string via toString()
- * @param properties optional properties to add to the annotation.
- */
-function warning(message, properties = {}) {
- command_issueCommand('warning', utils_toCommandProperties(properties), message instanceof Error ? message.toString() : message);
-}
-/**
- * Adds a notice issue
- * @param message notice issue message. Errors will be converted to string via toString()
- * @param properties optional properties to add to the annotation.
- */
-function notice(message, properties = {}) {
- issueCommand('notice', toCommandProperties(properties), message instanceof Error ? message.toString() : message);
-}
-/**
- * Writes info to log with console.log.
- * @param message info message
- */
-function info(message) {
- process.stdout.write(message + external_os_namespaceObject.EOL);
-}
-/**
- * Begin an output group.
- *
- * Output until the next `groupEnd` will be foldable in this group
- *
- * @param name The name of the output group
- */
-function startGroup(name) {
- issue('group', name);
}
-/**
- * End an output group.
- */
-function endGroup() {
- issue('endgroup');
+function isHttps(requestUrl) {
+ const parsedUrl = new URL(requestUrl);
+ return parsedUrl.protocol === 'https:';
}
-/**
- * Wrap an asynchronous function call in a group.
- *
- * Returns the same type as the function itself.
- *
- * @param name The name of the group
- * @param fn The function to wrap in the group
- */
-function group(name, fn) {
- return core_awaiter(this, void 0, void 0, function* () {
- startGroup(name);
- let result;
- try {
- result = yield fn();
- }
- finally {
- endGroup();
+class HttpClient {
+ constructor(userAgent, handlers, requestOptions) {
+ this._ignoreSslError = false;
+ this._allowRedirects = true;
+ this._allowRedirectDowngrade = false;
+ this._maxRedirects = 50;
+ this._allowRetries = false;
+ this._maxRetries = 1;
+ this._keepAlive = false;
+ this._disposed = false;
+ this.userAgent = this._getUserAgentWithOrchestrationId(userAgent);
+ this.handlers = handlers || [];
+ this.requestOptions = requestOptions;
+ if (requestOptions) {
+ if (requestOptions.ignoreSslError != null) {
+ this._ignoreSslError = requestOptions.ignoreSslError;
+ }
+ this._socketTimeout = requestOptions.socketTimeout;
+ if (requestOptions.allowRedirects != null) {
+ this._allowRedirects = requestOptions.allowRedirects;
+ }
+ if (requestOptions.allowRedirectDowngrade != null) {
+ this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ }
+ if (requestOptions.maxRedirects != null) {
+ this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ }
+ if (requestOptions.keepAlive != null) {
+ this._keepAlive = requestOptions.keepAlive;
+ }
+ if (requestOptions.allowRetries != null) {
+ this._allowRetries = requestOptions.allowRetries;
+ }
+ if (requestOptions.maxRetries != null) {
+ this._maxRetries = requestOptions.maxRetries;
+ }
}
- return result;
- });
-}
-//-----------------------------------------------------------------------
-// Wrapper action state
-//-----------------------------------------------------------------------
-/**
- * Saves state for current action, the state can only be retrieved by this action's post job execution.
- *
- * @param name name of the state to store
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function saveState(name, value) {
- const filePath = process.env['GITHUB_STATE'] || '';
- if (filePath) {
- return issueFileCommand('STATE', prepareKeyValueMessage(name, value));
}
- issueCommand('save-state', { name }, toCommandValue(value));
-}
-/**
- * Gets the value of an state set by this action's main execution.
- *
- * @param name name of the state to get
- * @returns string
- */
-function getState(name) {
- return process.env[`STATE_${name}`] || '';
-}
-function getIDToken(aud) {
- return core_awaiter(this, void 0, void 0, function* () {
- return yield OidcClient.getIDToken(aud);
- });
+ options(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ get(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('GET', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ del(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ post(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('POST', requestUrl, data, additionalHeaders || {});
+ });
+ }
+ patch(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+ });
+ }
+ put(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('PUT', requestUrl, data, additionalHeaders || {});
+ });
+ }
+ head(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+ });
+ }
+ sendStream(verb, requestUrl, stream, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request(verb, requestUrl, stream, additionalHeaders);
+ });
+ }
+ /**
+ * Gets a typed object from an endpoint
+ * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
+ */
+ getJson(requestUrl_1) {
+ return __awaiter(this, arguments, void 0, function* (requestUrl, additionalHeaders = {}) {
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ const res = yield this.get(requestUrl, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ postJson(requestUrl_1, obj_1) {
+ return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] =
+ this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);
+ const res = yield this.post(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ putJson(requestUrl_1, obj_1) {
+ return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] =
+ this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);
+ const res = yield this.put(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ patchJson(requestUrl_1, obj_1) {
+ return __awaiter(this, arguments, void 0, function* (requestUrl, obj, additionalHeaders = {}) {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] =
+ this._getExistingOrDefaultContentTypeHeader(additionalHeaders, MediaTypes.ApplicationJson);
+ const res = yield this.patch(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
+ }
+ /**
+ * Makes a raw http request.
+ * All other methods such as get, post, patch, and request ultimately call this.
+ * Prefer get, del, post and patch
+ */
+ request(verb, requestUrl, data, headers) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (this._disposed) {
+ throw new Error('Client has already been disposed.');
+ }
+ const parsedUrl = new URL(requestUrl);
+ let info = this._prepareRequest(verb, parsedUrl, headers);
+ // Only perform retries on reads since writes may not be idempotent.
+ const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
+ ? this._maxRetries + 1
+ : 1;
+ let numTries = 0;
+ let response;
+ do {
+ response = yield this.requestRaw(info, data);
+ // Check if it's an authentication challenge
+ if (response &&
+ response.message &&
+ response.message.statusCode === HttpCodes.Unauthorized) {
+ let authenticationHandler;
+ for (const handler of this.handlers) {
+ if (handler.canHandleAuthentication(response)) {
+ authenticationHandler = handler;
+ break;
+ }
+ }
+ if (authenticationHandler) {
+ return authenticationHandler.handleAuthentication(this, info, data);
+ }
+ else {
+ // We have received an unauthorized response but have no handlers to handle it.
+ // Let the response return to the caller.
+ return response;
+ }
+ }
+ let redirectsRemaining = this._maxRedirects;
+ while (response.message.statusCode &&
+ HttpRedirectCodes.includes(response.message.statusCode) &&
+ this._allowRedirects &&
+ redirectsRemaining > 0) {
+ const redirectUrl = response.message.headers['location'];
+ if (!redirectUrl) {
+ // if there's no location to redirect to, we won't
+ break;
+ }
+ const parsedRedirectUrl = new URL(redirectUrl);
+ if (parsedUrl.protocol === 'https:' &&
+ parsedUrl.protocol !== parsedRedirectUrl.protocol &&
+ !this._allowRedirectDowngrade) {
+ throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+ }
+ // we need to finish reading the response before reassigning response
+ // which will leak the open socket.
+ yield response.readBody();
+ // strip authorization header if redirected to a different hostname
+ if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+ for (const header in headers) {
+ // header names are case insensitive
+ if (header.toLowerCase() === 'authorization') {
+ delete headers[header];
+ }
+ }
+ }
+ // let's make the request with the new redirectUrl
+ info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+ response = yield this.requestRaw(info, data);
+ redirectsRemaining--;
+ }
+ if (!response.message.statusCode ||
+ !HttpResponseRetryCodes.includes(response.message.statusCode)) {
+ // If not a retry code, return immediately instead of retrying
+ return response;
+ }
+ numTries += 1;
+ if (numTries < maxTries) {
+ yield response.readBody();
+ yield this._performExponentialBackoff(numTries);
+ }
+ } while (numTries < maxTries);
+ return response;
+ });
+ }
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose() {
+ if (this._agent) {
+ this._agent.destroy();
+ }
+ this._disposed = true;
+ }
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info, data) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => {
+ function callbackForResult(err, res) {
+ if (err) {
+ reject(err);
+ }
+ else if (!res) {
+ // If `err` is not passed, then `res` must be passed.
+ reject(new Error('Unknown error'));
+ }
+ else {
+ resolve(res);
+ }
+ }
+ this.requestRawWithCallback(info, data, callbackForResult);
+ });
+ });
+ }
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info, data, onResult) {
+ if (typeof data === 'string') {
+ if (!info.options.headers) {
+ info.options.headers = {};
+ }
+ info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+ }
+ let callbackCalled = false;
+ function handleResult(err, res) {
+ if (!callbackCalled) {
+ callbackCalled = true;
+ onResult(err, res);
+ }
+ }
+ const req = info.httpModule.request(info.options, (msg) => {
+ const res = new HttpClientResponse(msg);
+ handleResult(undefined, res);
+ });
+ let socket;
+ req.on('socket', sock => {
+ socket = sock;
+ });
+ // If we ever get disconnected, we want the socket to timeout eventually
+ req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+ if (socket) {
+ socket.end();
+ }
+ handleResult(new Error(`Request timeout: ${info.options.path}`));
+ });
+ req.on('error', function (err) {
+ // err has statusCode property
+ // res should have headers
+ handleResult(err);
+ });
+ if (data && typeof data === 'string') {
+ req.write(data, 'utf8');
+ }
+ if (data && typeof data !== 'string') {
+ data.on('close', function () {
+ req.end();
+ });
+ data.pipe(req);
+ }
+ else {
+ req.end();
+ }
+ }
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+ getAgent(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ return this._getAgent(parsedUrl);
+ }
+ getAgentDispatcher(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ const proxyUrl = getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (!useProxy) {
+ return;
+ }
+ return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+ }
+ _prepareRequest(method, requestUrl, headers) {
+ const info = {};
+ info.parsedUrl = requestUrl;
+ const usingSsl = info.parsedUrl.protocol === 'https:';
+ info.httpModule = usingSsl ? external_https_namespaceObject : external_http_namespaceObject;
+ const defaultPort = usingSsl ? 443 : 80;
+ info.options = {};
+ info.options.host = info.parsedUrl.hostname;
+ info.options.port = info.parsedUrl.port
+ ? parseInt(info.parsedUrl.port)
+ : defaultPort;
+ info.options.path =
+ (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+ info.options.method = method;
+ info.options.headers = this._mergeHeaders(headers);
+ if (this.userAgent != null) {
+ info.options.headers['user-agent'] = this.userAgent;
+ }
+ info.options.agent = this._getAgent(info.parsedUrl);
+ // gives handlers an opportunity to participate
+ if (this.handlers) {
+ for (const handler of this.handlers) {
+ handler.prepareRequest(info.options);
+ }
+ }
+ return info;
+ }
+ _mergeHeaders(headers) {
+ if (this.requestOptions && this.requestOptions.headers) {
+ return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
+ }
+ return lowercaseKeys(headers || {});
+ }
+ /**
+ * Gets an existing header value or returns a default.
+ * Handles converting number header values to strings since HTTP headers must be strings.
+ * Note: This returns string | string[] since some headers can have multiple values.
+ * For headers that must always be a single string (like Content-Type), use the
+ * specialized _getExistingOrDefaultContentTypeHeader method instead.
+ */
+ _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ const headerValue = lowercaseKeys(this.requestOptions.headers)[header];
+ if (headerValue) {
+ clientHeader =
+ typeof headerValue === 'number' ? headerValue.toString() : headerValue;
+ }
+ }
+ const additionalValue = additionalHeaders[header];
+ if (additionalValue !== undefined) {
+ return typeof additionalValue === 'number'
+ ? additionalValue.toString()
+ : additionalValue;
+ }
+ if (clientHeader !== undefined) {
+ return clientHeader;
+ }
+ return _default;
+ }
+ /**
+ * Specialized version of _getExistingOrDefaultHeader for Content-Type header.
+ * Always returns a single string (not an array) since Content-Type should be a single value.
+ * Converts arrays to comma-separated strings and numbers to strings to ensure type safety.
+ * This was split from _getExistingOrDefaultHeader to provide stricter typing for callers
+ * that assign the result to places expecting a string (e.g., additionalHeaders[Headers.ContentType]).
+ */
+ _getExistingOrDefaultContentTypeHeader(additionalHeaders, _default) {
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ const headerValue = lowercaseKeys(this.requestOptions.headers)[Headers.ContentType];
+ if (headerValue) {
+ if (typeof headerValue === 'number') {
+ clientHeader = String(headerValue);
+ }
+ else if (Array.isArray(headerValue)) {
+ clientHeader = headerValue.join(', ');
+ }
+ else {
+ clientHeader = headerValue;
+ }
+ }
+ }
+ const additionalValue = additionalHeaders[Headers.ContentType];
+ // Return the first non-undefined value, converting numbers or arrays to strings if necessary
+ if (additionalValue !== undefined) {
+ if (typeof additionalValue === 'number') {
+ return String(additionalValue);
+ }
+ else if (Array.isArray(additionalValue)) {
+ return additionalValue.join(', ');
+ }
+ else {
+ return additionalValue;
+ }
+ }
+ if (clientHeader !== undefined) {
+ return clientHeader;
+ }
+ return _default;
+ }
+ _getAgent(parsedUrl) {
+ let agent;
+ const proxyUrl = getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (this._keepAlive && useProxy) {
+ agent = this._proxyAgent;
+ }
+ if (!useProxy) {
+ agent = this._agent;
+ }
+ // if agent is already assigned use that agent.
+ if (agent) {
+ return agent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ let maxSockets = 100;
+ if (this.requestOptions) {
+ maxSockets = this.requestOptions.maxSockets || external_http_.globalAgent.maxSockets;
+ }
+ // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.
+ if (proxyUrl && proxyUrl.hostname) {
+ const agentOptions = {
+ maxSockets,
+ keepAlive: this._keepAlive,
+ proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
+ proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
+ })), { host: proxyUrl.hostname, port: proxyUrl.port })
+ };
+ let tunnelAgent;
+ const overHttps = proxyUrl.protocol === 'https:';
+ if (usingSsl) {
+ tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+ }
+ else {
+ tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+ }
+ agent = tunnelAgent(agentOptions);
+ this._proxyAgent = agent;
+ }
+ // if tunneling agent isn't assigned create a new agent
+ if (!agent) {
+ const options = { keepAlive: this._keepAlive, maxSockets };
+ agent = usingSsl ? new external_https_.Agent(options) : new external_http_.Agent(options);
+ this._agent = agent;
+ }
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ agent.options = Object.assign(agent.options || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return agent;
+ }
+ _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
+ let proxyAgent;
+ if (this._keepAlive) {
+ proxyAgent = this._proxyAgentDispatcher;
+ }
+ // if agent is already assigned use that agent.
+ if (proxyAgent) {
+ return proxyAgent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ proxyAgent = new undici/* ProxyAgent */.kT(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
+ token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}`
+ })));
+ this._proxyAgentDispatcher = proxyAgent;
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return proxyAgent;
+ }
+ _getUserAgentWithOrchestrationId(userAgent) {
+ const baseUserAgent = userAgent || 'actions/http-client';
+ const orchId = process.env['ACTIONS_ORCHESTRATION_ID'];
+ if (orchId) {
+ // Sanitize the orchestration ID to ensure it contains only valid characters
+ // Valid characters: 0-9, a-z, _, -, .
+ const sanitizedId = orchId.replace(/[^a-z0-9_.-]/gi, '_');
+ return `${baseUserAgent} actions_orchestration_id/${sanitizedId}`;
+ }
+ return baseUserAgent;
+ }
+ _performExponentialBackoff(retryNumber) {
+ return __awaiter(this, void 0, void 0, function* () {
+ retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+ const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+ return new Promise(resolve => setTimeout(() => resolve(), ms));
+ });
+ }
+ _processResponse(res, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+ const statusCode = res.message.statusCode || 0;
+ const response = {
+ statusCode,
+ result: null,
+ headers: {}
+ };
+ // not found leads to null obj returned
+ if (statusCode === HttpCodes.NotFound) {
+ resolve(response);
+ }
+ // get the result from the body
+ function dateTimeDeserializer(key, value) {
+ if (typeof value === 'string') {
+ const a = new Date(value);
+ if (!isNaN(a.valueOf())) {
+ return a;
+ }
+ }
+ return value;
+ }
+ let obj;
+ let contents;
+ try {
+ contents = yield res.readBody();
+ if (contents && contents.length > 0) {
+ if (options && options.deserializeDates) {
+ obj = JSON.parse(contents, dateTimeDeserializer);
+ }
+ else {
+ obj = JSON.parse(contents);
+ }
+ response.result = obj;
+ }
+ response.headers = res.message.headers;
+ }
+ catch (err) {
+ // Invalid resource (contents not json); leaving result obj null
+ }
+ // note that 3xx redirects are handled by the http layer.
+ if (statusCode > 299) {
+ let msg;
+ // if exception/error in body, attempt to get better error
+ if (obj && obj.message) {
+ msg = obj.message;
+ }
+ else if (contents && contents.length > 0) {
+ // it may be the case that the exception is in the body message as string
+ msg = contents;
+ }
+ else {
+ msg = `Failed request: (${statusCode})`;
+ }
+ const err = new HttpClientError(msg, statusCode);
+ err.result = response.result;
+ reject(err);
+ }
+ else {
+ resolve(response);
+ }
+ }));
+ });
+ }
}
-/**
- * Summary exports
- */
-
-/**
- * @deprecated use core.summary
- */
+const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+//# sourceMappingURL=index.js.map
-/**
- * Path exports
- */
+/***/ })
-/**
- * Platform utilities exports
- */
+/******/ });
+/************************************************************************/
+/******/ // The module cache
+/******/ var __webpack_module_cache__ = {};
+/******/
+/******/ // The require function
+/******/ function __nccwpck_require__(moduleId) {
+/******/ // Check if module is in cache
+/******/ var cachedModule = __webpack_module_cache__[moduleId];
+/******/ if (cachedModule !== undefined) {
+/******/ return cachedModule.exports;
+/******/ }
+/******/ // Create a new module (and put it into the cache)
+/******/ var module = __webpack_module_cache__[moduleId] = {
+/******/ // no module.id needed
+/******/ // no module.loaded needed
+/******/ exports: {}
+/******/ };
+/******/
+/******/ // Execute the module function
+/******/ var threw = true;
+/******/ try {
+/******/ __webpack_modules__[moduleId](module, module.exports, __nccwpck_require__);
+/******/ threw = false;
+/******/ } finally {
+/******/ if(threw) delete __webpack_module_cache__[moduleId];
+/******/ }
+/******/
+/******/ // Return the exports of the module
+/******/ return module.exports;
+/******/ }
+/******/
+/******/ // expose the modules object (__webpack_modules__)
+/******/ __nccwpck_require__.m = __webpack_modules__;
+/******/
+/************************************************************************/
+/******/ /* webpack/runtime/compat get default export */
+/******/ (() => {
+/******/ // getDefaultExport function for compatibility with non-harmony modules
+/******/ __nccwpck_require__.n = (module) => {
+/******/ var getter = module && module.__esModule ?
+/******/ () => (module['default']) :
+/******/ () => (module);
+/******/ __nccwpck_require__.d(getter, { a: getter });
+/******/ return getter;
+/******/ };
+/******/ })();
+/******/
+/******/ /* webpack/runtime/create fake namespace object */
+/******/ (() => {
+/******/ var getProto = Object.getPrototypeOf ? (obj) => (Object.getPrototypeOf(obj)) : (obj) => (obj.__proto__);
+/******/ var leafPrototypes;
+/******/ // create a fake namespace object
+/******/ // mode & 1: value is a module id, require it
+/******/ // mode & 2: merge all properties of value into the ns
+/******/ // mode & 4: return value when already ns object
+/******/ // mode & 16: return value when it's Promise-like
+/******/ // mode & 8|1: behave like require
+/******/ __nccwpck_require__.t = function(value, mode) {
+/******/ if(mode & 1) value = this(value);
+/******/ if(mode & 8) return value;
+/******/ if(typeof value === 'object' && value) {
+/******/ if((mode & 4) && value.__esModule) return value;
+/******/ if((mode & 16) && typeof value.then === 'function') return value;
+/******/ }
+/******/ var ns = Object.create(null);
+/******/ __nccwpck_require__.r(ns);
+/******/ var def = {};
+/******/ leafPrototypes = leafPrototypes || [null, getProto({}), getProto([]), getProto(getProto)];
+/******/ for(var current = mode & 2 && value; typeof current == 'object' && !~leafPrototypes.indexOf(current); current = getProto(current)) {
+/******/ Object.getOwnPropertyNames(current).forEach((key) => (def[key] = () => (value[key])));
+/******/ }
+/******/ def['default'] = () => (value);
+/******/ __nccwpck_require__.d(ns, def);
+/******/ return ns;
+/******/ };
+/******/ })();
+/******/
+/******/ /* webpack/runtime/define property getters */
+/******/ (() => {
+/******/ // define getter functions for harmony exports
+/******/ __nccwpck_require__.d = (exports, definition) => {
+/******/ for(var key in definition) {
+/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) {
+/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] });
+/******/ }
+/******/ }
+/******/ };
+/******/ })();
+/******/
+/******/ /* webpack/runtime/ensure chunk */
+/******/ (() => {
+/******/ __nccwpck_require__.f = {};
+/******/ // This file contains only the entry chunk.
+/******/ // The chunk loading function for additional chunks
+/******/ __nccwpck_require__.e = (chunkId) => {
+/******/ return Promise.all(Object.keys(__nccwpck_require__.f).reduce((promises, key) => {
+/******/ __nccwpck_require__.f[key](chunkId, promises);
+/******/ return promises;
+/******/ }, []));
+/******/ };
+/******/ })();
+/******/
+/******/ /* webpack/runtime/get javascript chunk filename */
+/******/ (() => {
+/******/ // This function allow to reference async chunks
+/******/ __nccwpck_require__.u = (chunkId) => {
+/******/ // return url for filenames based on template
+/******/ return "" + chunkId + ".index.js";
+/******/ };
+/******/ })();
+/******/
+/******/ /* webpack/runtime/hasOwnProperty shorthand */
+/******/ (() => {
+/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
+/******/ })();
+/******/
+/******/ /* webpack/runtime/make namespace object */
+/******/ (() => {
+/******/ // define __esModule on exports
+/******/ __nccwpck_require__.r = (exports) => {
+/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
+/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
+/******/ }
+/******/ Object.defineProperty(exports, '__esModule', { value: true });
+/******/ };
+/******/ })();
+/******/
+/******/ /* webpack/runtime/compat */
+/******/
+/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = new URL('.', import.meta.url).pathname.slice(import.meta.url.match(/^file:\/\/\/\w:/) ? 1 : 0, -1) + "/";
+/******/
+/******/ /* webpack/runtime/import chunk loading */
+/******/ (() => {
+/******/ // no baseURI
+/******/
+/******/ // object to store loaded and loading chunks
+/******/ // undefined = chunk not loaded, null = chunk preloaded/prefetched
+/******/ // [resolve, Promise] = chunk loading, 0 = chunk loaded
+/******/ var installedChunks = {
+/******/ 792: 0
+/******/ };
+/******/
+/******/ var installChunk = (data) => {
+/******/ var {ids, modules, runtime} = data;
+/******/ // add "modules" to the modules object,
+/******/ // then flag all "ids" as loaded and fire callback
+/******/ var moduleId, chunkId, i = 0;
+/******/ for(moduleId in modules) {
+/******/ if(__nccwpck_require__.o(modules, moduleId)) {
+/******/ __nccwpck_require__.m[moduleId] = modules[moduleId];
+/******/ }
+/******/ }
+/******/ if(runtime) runtime(__nccwpck_require__);
+/******/ for(;i < ids.length; i++) {
+/******/ chunkId = ids[i];
+/******/ if(__nccwpck_require__.o(installedChunks, chunkId) && installedChunks[chunkId]) {
+/******/ installedChunks[chunkId][0]();
+/******/ }
+/******/ installedChunks[ids[i]] = 0;
+/******/ }
+/******/
+/******/ }
+/******/
+/******/ __nccwpck_require__.f.j = (chunkId, promises) => {
+/******/ // import() chunk loading for javascript
+/******/ var installedChunkData = __nccwpck_require__.o(installedChunks, chunkId) ? installedChunks[chunkId] : undefined;
+/******/ if(installedChunkData !== 0) { // 0 means "already installed".
+/******/
+/******/ // a Promise means "currently loading".
+/******/ if(installedChunkData) {
+/******/ promises.push(installedChunkData[1]);
+/******/ } else {
+/******/ if(true) { // all chunks have JS
+/******/ // setup Promise in chunk cache
+/******/ var promise = import("./" + __nccwpck_require__.u(chunkId)).then(installChunk, (e) => {
+/******/ if(installedChunks[chunkId] !== 0) installedChunks[chunkId] = undefined;
+/******/ throw e;
+/******/ });
+/******/ var promise = Promise.race([promise, new Promise((resolve) => (installedChunkData = installedChunks[chunkId] = [resolve]))])
+/******/ promises.push(installedChunkData[1] = promise);
+/******/ }
+/******/ }
+/******/ }
+/******/ };
+/******/
+/******/ // no prefetching
+/******/
+/******/ // no preloaded
+/******/
+/******/ // no external install chunk
+/******/
+/******/ // no on chunks loaded
+/******/ })();
+/******/
+/************************************************************************/
+var __webpack_exports__ = {};
-//# sourceMappingURL=core.js.map
+// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js + 8 modules
+var lib_core = __nccwpck_require__(6058);
+// EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js + 4 modules
+var lib_exec = __nccwpck_require__(382);
+// EXTERNAL MODULE: external "fs"
+var external_fs_ = __nccwpck_require__(9896);
+// EXTERNAL MODULE: external "path"
+var external_path_ = __nccwpck_require__(6928);
;// CONCATENATED MODULE: ./node_modules/js-yaml/dist/js-yaml.mjs
/*! js-yaml 4.1.1 https://github.com/nodeca/js-yaml @license MIT */
@@ -38766,8 +38962,10 @@ var jsYaml = {
+// EXTERNAL MODULE: external "assert"
+var external_assert_ = __nccwpck_require__(2613);
;// CONCATENATED MODULE: ./node_modules/@actions/io/lib/io-util.js
-var lib_io_util_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+var __awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -38778,9 +38976,9 @@ var lib_io_util_awaiter = (undefined && undefined.__awaiter) || function (thisAr
};
-const { chmod: io_util_chmod, copyFile: io_util_copyFile, lstat: io_util_lstat, mkdir: io_util_mkdir, open: lib_io_util_open, readdir: io_util_readdir, rename: io_util_rename, rm: io_util_rm, rmdir: io_util_rmdir, stat: io_util_stat, symlink: io_util_symlink, unlink: io_util_unlink } = external_fs_namespaceObject.promises;
+const { chmod, copyFile, lstat, mkdir, open: io_util_open, readdir, rename, rm, rmdir, stat, symlink, unlink } = external_fs_.promises;
// export const {open} = 'fs'
-const io_util_IS_WINDOWS = process.platform === 'win32';
+const IS_WINDOWS = process.platform === 'win32';
/**
* Custom implementation of readlink to ensure Windows junctions
* maintain trailing backslash for backward compatibility with Node.js < 24
@@ -38792,24 +38990,24 @@ const io_util_IS_WINDOWS = process.platform === 'win32';
* This implementation restores the Node 20 behavior by adding a trailing
* backslash to all junction results on Windows.
*/
-function io_util_readlink(fsPath) {
- return lib_io_util_awaiter(this, void 0, void 0, function* () {
- const result = yield external_fs_namespaceObject.promises.readlink(fsPath);
+function readlink(fsPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = yield external_fs_.promises.readlink(fsPath);
// On Windows, restore Node 20 behavior: add trailing backslash to all results
// since junctions on Windows are always directory links
- if (io_util_IS_WINDOWS && !result.endsWith('\\')) {
+ if (IS_WINDOWS && !result.endsWith('\\')) {
return `${result}\\`;
}
return result;
});
}
// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
-const io_util_UV_FS_O_EXLOCK = 0x10000000;
-const io_util_READONLY = external_fs_namespaceObject.constants.O_RDONLY;
-function io_util_exists(fsPath) {
- return lib_io_util_awaiter(this, void 0, void 0, function* () {
+const UV_FS_O_EXLOCK = 0x10000000;
+const READONLY = external_fs_.constants.O_RDONLY;
+function exists(fsPath) {
+ return __awaiter(this, void 0, void 0, function* () {
try {
- yield io_util_stat(fsPath);
+ yield stat(fsPath);
}
catch (err) {
if (err.code === 'ENOENT') {
@@ -38820,9 +39018,9 @@ function io_util_exists(fsPath) {
return true;
});
}
-function io_util_isDirectory(fsPath_1) {
- return lib_io_util_awaiter(this, arguments, void 0, function* (fsPath, useStat = false) {
- const stats = useStat ? yield io_util_stat(fsPath) : yield io_util_lstat(fsPath);
+function isDirectory(fsPath_1) {
+ return __awaiter(this, arguments, void 0, function* (fsPath, useStat = false) {
+ const stats = useStat ? yield stat(fsPath) : yield lstat(fsPath);
return stats.isDirectory();
});
}
@@ -38830,12 +39028,12 @@ function io_util_isDirectory(fsPath_1) {
* On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
* \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
*/
-function io_util_isRooted(p) {
- p = io_util_normalizeSeparators(p);
+function isRooted(p) {
+ p = normalizeSeparators(p);
if (!p) {
throw new Error('isRooted() parameter "p" cannot be empty');
}
- if (io_util_IS_WINDOWS) {
+ if (IS_WINDOWS) {
return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
); // e.g. C: or C:\hello
}
@@ -38847,12 +39045,12 @@ function io_util_isRooted(p) {
* @param extensions additional file extensions to try
* @return if file exists and is executable, returns the file path. otherwise empty string.
*/
-function io_util_tryGetExecutablePath(filePath, extensions) {
- return lib_io_util_awaiter(this, void 0, void 0, function* () {
+function tryGetExecutablePath(filePath, extensions) {
+ return __awaiter(this, void 0, void 0, function* () {
let stats = undefined;
try {
// test file exists
- stats = yield io_util_stat(filePath);
+ stats = yield stat(filePath);
}
catch (err) {
if (err.code !== 'ENOENT') {
@@ -38861,7 +39059,7 @@ function io_util_tryGetExecutablePath(filePath, extensions) {
}
}
if (stats && stats.isFile()) {
- if (io_util_IS_WINDOWS) {
+ if (IS_WINDOWS) {
// on Windows, test for valid extension
const upperExt = path.extname(filePath).toUpperCase();
if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
@@ -38869,7 +39067,7 @@ function io_util_tryGetExecutablePath(filePath, extensions) {
}
}
else {
- if (io_util_isUnixExecutable(stats)) {
+ if (isUnixExecutable(stats)) {
return filePath;
}
}
@@ -38880,7 +39078,7 @@ function io_util_tryGetExecutablePath(filePath, extensions) {
filePath = originalFilePath + extension;
stats = undefined;
try {
- stats = yield io_util_stat(filePath);
+ stats = yield stat(filePath);
}
catch (err) {
if (err.code !== 'ENOENT') {
@@ -38889,12 +39087,12 @@ function io_util_tryGetExecutablePath(filePath, extensions) {
}
}
if (stats && stats.isFile()) {
- if (io_util_IS_WINDOWS) {
+ if (IS_WINDOWS) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path.dirname(filePath);
const upperName = path.basename(filePath).toUpperCase();
- for (const actualName of yield io_util_readdir(directory)) {
+ for (const actualName of yield readdir(directory)) {
if (upperName === actualName.toUpperCase()) {
filePath = path.join(directory, actualName);
break;
@@ -38908,7 +39106,7 @@ function io_util_tryGetExecutablePath(filePath, extensions) {
return filePath;
}
else {
- if (io_util_isUnixExecutable(stats)) {
+ if (isUnixExecutable(stats)) {
return filePath;
}
}
@@ -38917,9 +39115,9 @@ function io_util_tryGetExecutablePath(filePath, extensions) {
return '';
});
}
-function io_util_normalizeSeparators(p) {
+function normalizeSeparators(p) {
p = p || '';
- if (io_util_IS_WINDOWS) {
+ if (IS_WINDOWS) {
// convert slashes on Windows
p = p.replace(/\//g, '\\');
// remove redundant slashes
@@ -38931,7 +39129,7 @@ function io_util_normalizeSeparators(p) {
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
-function io_util_isUnixExecutable(stats) {
+function isUnixExecutable(stats) {
return ((stats.mode & 1) > 0 ||
((stats.mode & 8) > 0 &&
process.getgid !== undefined &&
@@ -38941,13 +39139,13 @@ function io_util_isUnixExecutable(stats) {
stats.uid === process.getuid()));
}
// Get the path of cmd.exe in windows
-function io_util_getCmdPath() {
+function getCmdPath() {
var _a;
return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;
}
//# sourceMappingURL=io-util.js.map
;// CONCATENATED MODULE: ./node_modules/@actions/io/lib/io.js
-var lib_io_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
+var io_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
@@ -38967,10 +39165,10 @@ var lib_io_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _a
* @param dest destination path
* @param options optional. See CopyOptions.
*/
-function lib_io_cp(source_1, dest_1) {
- return lib_io_awaiter(this, arguments, void 0, function* (source, dest, options = {}) {
- const { force, recursive, copySourceDirectory } = io_readCopyOptions(options);
- const destStat = (yield io_util_exists(dest)) ? yield io_util_stat(dest) : null;
+function io_cp(source_1, dest_1) {
+ return io_awaiter(this, arguments, void 0, function* (source, dest, options = {}) {
+ const { force, recursive, copySourceDirectory } = readCopyOptions(options);
+ const destStat = (yield exists(dest)) ? yield stat(dest) : null;
// Dest is an existing file, but not forcing
if (destStat && destStat.isFile() && !force) {
return;
@@ -38979,16 +39177,16 @@ function lib_io_cp(source_1, dest_1) {
const newDest = destStat && destStat.isDirectory() && copySourceDirectory
? external_path_.join(dest, external_path_.basename(source))
: dest;
- if (!(yield io_util_exists(source))) {
+ if (!(yield exists(source))) {
throw new Error(`no such file or directory: ${source}`);
}
- const sourceStat = yield io_util_stat(source);
+ const sourceStat = yield stat(source);
if (sourceStat.isDirectory()) {
if (!recursive) {
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
}
else {
- yield io_cpDirRecursive(source, newDest, 0, force);
+ yield cpDirRecursive(source, newDest, 0, force);
}
}
else {
@@ -38996,7 +39194,7 @@ function lib_io_cp(source_1, dest_1) {
// a file cannot be copied to itself
throw new Error(`'${newDest}' and '${source}' are the same file`);
}
- yield lib_io_copyFile(source, newDest, force);
+ yield io_copyFile(source, newDest, force);
}
});
}
@@ -39007,8 +39205,8 @@ function lib_io_cp(source_1, dest_1) {
* @param dest destination path
* @param options optional. See MoveOptions.
*/
-function io_mv(source_1, dest_1) {
- return lib_io_awaiter(this, arguments, void 0, function* (source, dest, options = {}) {
+function mv(source_1, dest_1) {
+ return io_awaiter(this, arguments, void 0, function* (source, dest, options = {}) {
if (yield ioUtil.exists(dest)) {
let destExists = true;
if (yield ioUtil.isDirectory(dest)) {
@@ -39018,14 +39216,14 @@ function io_mv(source_1, dest_1) {
}
if (destExists) {
if (options.force == null || options.force) {
- yield io_rmRF(dest);
+ yield rmRF(dest);
}
else {
throw new Error('Destination already exists');
}
}
}
- yield io_mkdirP(path.dirname(dest));
+ yield mkdirP(path.dirname(dest));
yield ioUtil.rename(source, dest);
});
}
@@ -39034,9 +39232,9 @@ function io_mv(source_1, dest_1) {
*
* @param inputPath path to remove
*/
-function io_rmRF(inputPath) {
- return lib_io_awaiter(this, void 0, void 0, function* () {
- if (io_util_IS_WINDOWS) {
+function rmRF(inputPath) {
+ return io_awaiter(this, void 0, void 0, function* () {
+ if (IS_WINDOWS) {
// Check for invalid characters
// https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
if (/[*"<>|]/.test(inputPath)) {
@@ -39045,7 +39243,7 @@ function io_rmRF(inputPath) {
}
try {
// note if path does not exist, error is silent
- yield io_util_rm(inputPath, {
+ yield rm(inputPath, {
force: true,
maxRetries: 3,
recursive: true,
@@ -39064,10 +39262,10 @@ function io_rmRF(inputPath) {
* @param fsPath path to create
* @returns Promise
*/
-function io_mkdirP(fsPath) {
- return lib_io_awaiter(this, void 0, void 0, function* () {
+function mkdirP(fsPath) {
+ return io_awaiter(this, void 0, void 0, function* () {
(0,external_assert_.ok)(fsPath, 'a path argument must be provided');
- yield io_util_mkdir(fsPath, { recursive: true });
+ yield mkdir(fsPath, { recursive: true });
});
}
/**
@@ -39078,14 +39276,14 @@ function io_mkdirP(fsPath) {
* @param check whether to check if tool exists
* @returns Promise path to tool
*/
-function io_which(tool, check) {
- return lib_io_awaiter(this, void 0, void 0, function* () {
+function which(tool, check) {
+ return io_awaiter(this, void 0, void 0, function* () {
if (!tool) {
throw new Error("parameter 'tool' is required");
}
// recursive when check=true
if (check) {
- const result = yield io_which(tool, false);
+ const result = yield which(tool, false);
if (!result) {
if (ioUtil.IS_WINDOWS) {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
@@ -39096,7 +39294,7 @@ function io_which(tool, check) {
}
return result;
}
- const matches = yield io_findInPath(tool);
+ const matches = yield findInPath(tool);
if (matches && matches.length > 0) {
return matches[0];
}
@@ -39108,8 +39306,8 @@ function io_which(tool, check) {
*
* @returns Promise the paths of the tool
*/
-function io_findInPath(tool) {
- return lib_io_awaiter(this, void 0, void 0, function* () {
+function findInPath(tool) {
+ return io_awaiter(this, void 0, void 0, function* () {
if (!tool) {
throw new Error("parameter 'tool' is required");
}
@@ -39159,7 +39357,7 @@ function io_findInPath(tool) {
return matches;
});
}
-function io_readCopyOptions(options) {
+function readCopyOptions(options) {
const force = options.force == null ? true : options.force;
const recursive = Boolean(options.recursive);
const copySourceDirectory = options.copySourceDirectory == null
@@ -39167,59 +39365,65 @@ function io_readCopyOptions(options) {
: Boolean(options.copySourceDirectory);
return { force, recursive, copySourceDirectory };
}
-function io_cpDirRecursive(sourceDir, destDir, currentDepth, force) {
- return lib_io_awaiter(this, void 0, void 0, function* () {
+function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
+ return io_awaiter(this, void 0, void 0, function* () {
// Ensure there is not a run away recursive copy
if (currentDepth >= 255)
return;
currentDepth++;
- yield io_mkdirP(destDir);
- const files = yield io_util_readdir(sourceDir);
+ yield mkdirP(destDir);
+ const files = yield readdir(sourceDir);
for (const fileName of files) {
const srcFile = `${sourceDir}/${fileName}`;
const destFile = `${destDir}/${fileName}`;
- const srcFileStat = yield io_util_lstat(srcFile);
+ const srcFileStat = yield lstat(srcFile);
if (srcFileStat.isDirectory()) {
// Recurse
- yield io_cpDirRecursive(srcFile, destFile, currentDepth, force);
+ yield cpDirRecursive(srcFile, destFile, currentDepth, force);
}
else {
- yield lib_io_copyFile(srcFile, destFile, force);
+ yield io_copyFile(srcFile, destFile, force);
}
}
// Change the mode for the newly created directory
- yield io_util_chmod(destDir, (yield io_util_stat(sourceDir)).mode);
+ yield chmod(destDir, (yield stat(sourceDir)).mode);
});
}
// Buffered file copy
-function lib_io_copyFile(srcFile, destFile, force) {
- return lib_io_awaiter(this, void 0, void 0, function* () {
- if ((yield io_util_lstat(srcFile)).isSymbolicLink()) {
+function io_copyFile(srcFile, destFile, force) {
+ return io_awaiter(this, void 0, void 0, function* () {
+ if ((yield lstat(srcFile)).isSymbolicLink()) {
// unlink/re-link it
try {
- yield io_util_lstat(destFile);
- yield io_util_unlink(destFile);
+ yield lstat(destFile);
+ yield unlink(destFile);
}
catch (e) {
// Try to override file permission
if (e.code === 'EPERM') {
- yield io_util_chmod(destFile, '0666');
- yield io_util_unlink(destFile);
+ yield chmod(destFile, '0666');
+ yield unlink(destFile);
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
- const symlinkFull = yield io_util_readlink(srcFile);
- yield io_util_symlink(symlinkFull, destFile, io_util_IS_WINDOWS ? 'junction' : null);
+ const symlinkFull = yield readlink(srcFile);
+ yield symlink(symlinkFull, destFile, IS_WINDOWS ? 'junction' : null);
}
- else if (!(yield io_util_exists(destFile)) || force) {
- yield io_util_copyFile(srcFile, destFile);
+ else if (!(yield exists(destFile)) || force) {
+ yield copyFile(srcFile, destFile);
}
});
}
//# sourceMappingURL=io.js.map
+// EXTERNAL MODULE: external "crypto"
+var external_crypto_ = __nccwpck_require__(6982);
// EXTERNAL MODULE: ./node_modules/semver/index.js
var node_modules_semver = __nccwpck_require__(2088);
+// EXTERNAL MODULE: external "os"
+var external_os_ = __nccwpck_require__(857);
+// EXTERNAL MODULE: external "child_process"
+var external_child_process_ = __nccwpck_require__(5317);
;// CONCATENATED MODULE: ./node_modules/@actions/tool-cache/lib/manifest.js
var manifest_awaiter = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
@@ -39241,11 +39445,11 @@ const _internal = {
const lsbReleaseFile = '/etc/lsb-release';
const osReleaseFile = '/etc/os-release';
let contents = '';
- if (external_fs_namespaceObject.existsSync(lsbReleaseFile)) {
- contents = external_fs_namespaceObject.readFileSync(lsbReleaseFile).toString();
+ if (external_fs_.existsSync(lsbReleaseFile)) {
+ contents = external_fs_.readFileSync(lsbReleaseFile).toString();
}
- else if (external_fs_namespaceObject.existsSync(osReleaseFile)) {
- contents = external_fs_namespaceObject.readFileSync(osReleaseFile).toString();
+ else if (external_fs_.existsSync(osReleaseFile)) {
+ contents = external_fs_.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -39326,6 +39530,8 @@ function _readLinuxVersionFile() {
return _internal.readLinuxVersionFile();
}
//# sourceMappingURL=manifest.js.map
+// EXTERNAL MODULE: ./node_modules/@actions/http-client/lib/index.js + 1 modules
+var lib = __nccwpck_require__(4942);
;// CONCATENATED MODULE: external "stream"
const external_stream_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("stream");
// EXTERNAL MODULE: external "util"
@@ -39368,11 +39574,11 @@ class RetryHelper {
if (isRetryable && !isRetryable(err)) {
throw err;
}
- info(err.message);
+ lib_core/* info */.pq(err.message);
}
// Sleep
const seconds = this.getSleepAmount();
- info(`Waiting ${seconds} seconds before trying again`);
+ lib_core/* info */.pq(`Waiting ${seconds} seconds before trying again`);
yield this.sleep(seconds);
attempt++;
}
@@ -39436,10 +39642,10 @@ const userAgent = 'actions/tool-cache';
*/
function downloadTool(url, dest, auth, headers) {
return tool_cache_awaiter(this, void 0, void 0, function* () {
- dest = dest || external_path_.join(_getTempDirectory(), external_crypto_namespaceObject.randomUUID());
- yield io_mkdirP(external_path_.dirname(dest));
- core_debug(`Downloading ${url}`);
- core_debug(`Destination ${dest}`);
+ dest = dest || external_path_.join(_getTempDirectory(), external_crypto_.randomUUID());
+ yield mkdirP(external_path_.dirname(dest));
+ lib_core/* debug */.Yz(`Downloading ${url}`);
+ lib_core/* debug */.Yz(`Destination ${dest}`);
const maxAttempts = 3;
const minSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS', 10);
const maxSeconds = _getGlobal('TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS', 20);
@@ -39462,15 +39668,15 @@ function downloadTool(url, dest, auth, headers) {
}
function downloadToolAttempt(url, dest, auth, headers) {
return tool_cache_awaiter(this, void 0, void 0, function* () {
- if (external_fs_namespaceObject.existsSync(dest)) {
+ if (external_fs_.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
// Get the response headers
- const http = new lib_HttpClient(userAgent, [], {
+ const http = new lib/* HttpClient */.Qq(userAgent, [], {
allowRetries: false
});
if (auth) {
- core_debug('set auth');
+ lib_core/* debug */.Yz('set auth');
if (headers === undefined) {
headers = {};
}
@@ -39479,7 +39685,7 @@ function downloadToolAttempt(url, dest, auth, headers) {
const response = yield http.get(url, headers);
if (response.message.statusCode !== 200) {
const err = new HTTPError(response.message.statusCode);
- core_debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
+ lib_core/* debug */.Yz(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
}
// Download the response body
@@ -39488,20 +39694,20 @@ function downloadToolAttempt(url, dest, auth, headers) {
const readStream = responseMessageFactory();
let succeeded = false;
try {
- yield pipeline(readStream, external_fs_namespaceObject.createWriteStream(dest));
- core_debug('download complete');
+ yield pipeline(readStream, external_fs_.createWriteStream(dest));
+ lib_core/* debug */.Yz('download complete');
succeeded = true;
return dest;
}
finally {
// Error, delete dest before retry
if (!succeeded) {
- core_debug('download failed');
+ lib_core/* debug */.Yz('download failed');
try {
- yield io_rmRF(dest);
+ yield rmRF(dest);
}
catch (err) {
- core_debug(`Failed to delete '${dest}'. ${err.message}`);
+ lib_core/* debug */.Yz(`Failed to delete '${dest}'. ${err.message}`);
}
}
}
@@ -39596,9 +39802,9 @@ function extractTar(file_1, dest_1) {
// Create dest
dest = yield _createExtractFolder(dest);
// Determine whether GNU tar
- core_debug('Checking tar --version');
+ lib_core/* debug */.Yz('Checking tar --version');
let versionOutput = '';
- yield exec_exec('tar --version', [], {
+ yield (0,lib_exec/* exec */.m)('tar --version', [], {
ignoreReturnCode: true,
silent: true,
listeners: {
@@ -39606,7 +39812,7 @@ function extractTar(file_1, dest_1) {
stderr: (data) => (versionOutput += data.toString())
}
});
- core_debug(versionOutput.trim());
+ lib_core/* debug */.Yz(versionOutput.trim());
const isGnuTar = versionOutput.toUpperCase().includes('GNU TAR');
// Initialize args
let args;
@@ -39616,7 +39822,7 @@ function extractTar(file_1, dest_1) {
else {
args = [flags];
}
- if (isDebug() && !flags.includes('v')) {
+ if (lib_core/* isDebug */._o() && !flags.includes('v')) {
args.push('-v');
}
let destArg = dest;
@@ -39634,7 +39840,7 @@ function extractTar(file_1, dest_1) {
args.push('--overwrite');
}
args.push('-C', destArg, '-f', fileArg);
- yield exec_exec(`tar`, args);
+ yield (0,lib_exec/* exec */.m)(`tar`, args);
return dest;
});
}
@@ -39762,19 +39968,19 @@ function extractZipNix(file, dest) {
function cacheDir(sourceDir, tool, version, arch) {
return tool_cache_awaiter(this, void 0, void 0, function* () {
version = node_modules_semver.clean(version) || version;
- arch = arch || external_os_namespaceObject.arch();
- core_debug(`Caching tool ${tool} ${version} ${arch}`);
- core_debug(`source dir: ${sourceDir}`);
- if (!external_fs_namespaceObject.statSync(sourceDir).isDirectory()) {
+ arch = arch || external_os_.arch();
+ lib_core/* debug */.Yz(`Caching tool ${tool} ${version} ${arch}`);
+ lib_core/* debug */.Yz(`source dir: ${sourceDir}`);
+ if (!external_fs_.statSync(sourceDir).isDirectory()) {
throw new Error('sourceDir is not a directory');
}
// Create the tool dir
const destPath = yield _createToolPath(tool, version, arch);
// copy each child item. do not move. move can fail on Windows
// due to anti-virus software having an open handle on a file.
- for (const itemName of external_fs_namespaceObject.readdirSync(sourceDir)) {
+ for (const itemName of external_fs_.readdirSync(sourceDir)) {
const s = external_path_.join(sourceDir, itemName);
- yield lib_io_cp(s, destPath, { recursive: true });
+ yield io_cp(s, destPath, { recursive: true });
}
// write .complete
_completeToolPath(tool, version, arch);
@@ -39826,7 +40032,7 @@ function find(toolName, versionSpec, arch) {
if (!versionSpec) {
throw new Error('versionSpec parameter is required');
}
- arch = arch || external_os_namespaceObject.arch();
+ arch = arch || external_os_.arch();
// attempt to resolve an explicit version
if (!isExplicitVersion(versionSpec)) {
const localVersions = findAllVersions(toolName, arch);
@@ -39838,13 +40044,13 @@ function find(toolName, versionSpec, arch) {
if (versionSpec) {
versionSpec = node_modules_semver.clean(versionSpec) || '';
const cachePath = external_path_.join(_getCacheDirectory(), toolName, versionSpec, arch);
- core_debug(`checking cache: ${cachePath}`);
- if (external_fs_namespaceObject.existsSync(cachePath) && external_fs_namespaceObject.existsSync(`${cachePath}.complete`)) {
- core_debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
+ lib_core/* debug */.Yz(`checking cache: ${cachePath}`);
+ if (external_fs_.existsSync(cachePath) && external_fs_.existsSync(`${cachePath}.complete`)) {
+ lib_core/* debug */.Yz(`Found tool in cache ${toolName} ${versionSpec} ${arch}`);
toolPath = cachePath;
}
else {
- core_debug('not found');
+ lib_core/* debug */.Yz('not found');
}
}
return toolPath;
@@ -39857,14 +40063,14 @@ function find(toolName, versionSpec, arch) {
*/
function findAllVersions(toolName, arch) {
const versions = [];
- arch = arch || external_os_namespaceObject.arch();
+ arch = arch || external_os_.arch();
const toolPath = external_path_.join(_getCacheDirectory(), toolName);
- if (external_fs_namespaceObject.existsSync(toolPath)) {
- const children = external_fs_namespaceObject.readdirSync(toolPath);
+ if (external_fs_.existsSync(toolPath)) {
+ const children = external_fs_.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = external_path_.join(toolPath, child, arch || '');
- if (external_fs_namespaceObject.existsSync(fullPath) && external_fs_namespaceObject.existsSync(`${fullPath}.complete`)) {
+ if (external_fs_.existsSync(fullPath) && external_fs_.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -39919,28 +40125,28 @@ function _createExtractFolder(dest) {
return tool_cache_awaiter(this, void 0, void 0, function* () {
if (!dest) {
// create a temp dir
- dest = external_path_.join(_getTempDirectory(), external_crypto_namespaceObject.randomUUID());
+ dest = external_path_.join(_getTempDirectory(), external_crypto_.randomUUID());
}
- yield io_mkdirP(dest);
+ yield mkdirP(dest);
return dest;
});
}
function _createToolPath(tool, version, arch) {
return tool_cache_awaiter(this, void 0, void 0, function* () {
const folderPath = external_path_.join(_getCacheDirectory(), tool, node_modules_semver.clean(version) || version, arch || '');
- core_debug(`destination ${folderPath}`);
+ lib_core/* debug */.Yz(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
- yield io_rmRF(folderPath);
- yield io_rmRF(markerPath);
- yield io_mkdirP(folderPath);
+ yield rmRF(folderPath);
+ yield rmRF(markerPath);
+ yield mkdirP(folderPath);
return folderPath;
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = external_path_.join(_getCacheDirectory(), tool, node_modules_semver.clean(version) || version, arch || '');
const markerPath = `${folderPath}.complete`;
- external_fs_namespaceObject.writeFileSync(markerPath, '');
- core_debug('finished caching tool');
+ external_fs_.writeFileSync(markerPath, '');
+ lib_core/* debug */.Yz('finished caching tool');
}
/**
* Check if version string is explicit
@@ -39949,9 +40155,9 @@ function _completeToolPath(tool, version, arch) {
*/
function isExplicitVersion(versionSpec) {
const c = node_modules_semver.clean(versionSpec) || '';
- core_debug(`isExplicit: ${c}`);
+ lib_core/* debug */.Yz(`isExplicit: ${c}`);
const valid = node_modules_semver.valid(c) != null;
- core_debug(`explicit? ${valid}`);
+ lib_core/* debug */.Yz(`explicit? ${valid}`);
return valid;
}
/**
@@ -39962,7 +40168,7 @@ function isExplicitVersion(versionSpec) {
*/
function evaluateVersions(versions, versionSpec) {
let version = '';
- core_debug(`evaluating ${versions.length} versions`);
+ lib_core/* debug */.Yz(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (node_modules_semver.gt(a, b)) {
return 1;
@@ -39978,10 +40184,10 @@ function evaluateVersions(versions, versionSpec) {
}
}
if (version) {
- core_debug(`matched: ${version}`);
+ lib_core/* debug */.Yz(`matched: ${version}`);
}
else {
- core_debug('match not found');
+ lib_core/* debug */.Yz('match not found');
}
return version;
}
@@ -40056,7 +40262,7 @@ async function resolveDownloadUrl(version) {
const suffix = getAssetSuffix();
const assetName = `apm-${suffix}.tar.gz`;
if (version === 'latest') {
- const token = getInput('github-token');
+ const token = lib_core/* getInput */.V4('github-token');
const headers = { 'Accept': 'application/vnd.github+json' };
if (token)
headers['Authorization'] = `Bearer ${token}`;
@@ -40085,20 +40291,20 @@ async function resolveDownloadUrl(version) {
* Uses @actions/tool-cache for downloading, extracting, and caching.
*/
async function ensureApmInstalled() {
- const apmVersion = getInput('apm-version') || 'latest';
+ const apmVersion = lib_core/* getInput */.V4('apm-version') || 'latest';
// Check if already available
- const rc = await exec_exec('apm', ['--version'], { ignoreReturnCode: true, silent: true }).catch(() => 1);
+ const rc = await lib_exec/* exec */.m('apm', ['--version'], { ignoreReturnCode: true, silent: true }).catch(() => 1);
if (rc === 0) {
- info('APM already installed');
+ lib_core/* info */.pq('APM already installed');
return;
}
- info(`Installing APM (version: ${apmVersion})...`);
+ lib_core/* info */.pq(`Installing APM (version: ${apmVersion})...`);
const { url, resolvedVersion } = await resolveDownloadUrl(apmVersion);
const suffix = getAssetSuffix();
// Check tool-cache first
let toolDir = find('apm', resolvedVersion);
if (!toolDir) {
- info(`Downloading APM ${resolvedVersion} from ${url}`);
+ lib_core/* info */.pq(`Downloading APM ${resolvedVersion} from ${url}`);
const downloadPath = await downloadTool(url);
const extractedDir = await extractTar(downloadPath);
// The tarball extracts to apm-{os}-{arch}/ containing the apm binary
@@ -40106,16 +40312,16 @@ async function ensureApmInstalled() {
toolDir = await cacheDir(innerDir, 'apm', resolvedVersion);
}
else {
- info(`APM ${resolvedVersion} found in tool cache`);
+ lib_core/* info */.pq(`APM ${resolvedVersion} found in tool cache`);
}
// Add to PATH
- addPath(toolDir);
+ lib_core/* addPath */.fM(toolDir);
// Verify
- const verify = await exec_exec('apm', ['--version'], { ignoreReturnCode: true });
+ const verify = await lib_exec/* exec */.m('apm', ['--version'], { ignoreReturnCode: true });
if (verify !== 0) {
throw new Error('APM installation verification failed');
}
- info(`APM ${resolvedVersion} installed successfully`);
+ lib_core/* info */.pq(`APM ${resolvedVersion} installed successfully`);
}
;// CONCATENATED MODULE: ./node_modules/@actions/glob/lib/internal-glob-options-helper.js
@@ -40134,23 +40340,23 @@ function getOptions(copy) {
if (copy) {
if (typeof copy.followSymbolicLinks === 'boolean') {
result.followSymbolicLinks = copy.followSymbolicLinks;
- core_debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
+ lib_core/* debug */.Yz(`followSymbolicLinks '${result.followSymbolicLinks}'`);
}
if (typeof copy.implicitDescendants === 'boolean') {
result.implicitDescendants = copy.implicitDescendants;
- core_debug(`implicitDescendants '${result.implicitDescendants}'`);
+ lib_core/* debug */.Yz(`implicitDescendants '${result.implicitDescendants}'`);
}
if (typeof copy.matchDirectories === 'boolean') {
result.matchDirectories = copy.matchDirectories;
- core_debug(`matchDirectories '${result.matchDirectories}'`);
+ lib_core/* debug */.Yz(`matchDirectories '${result.matchDirectories}'`);
}
if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
- core_debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
+ lib_core/* debug */.Yz(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
}
if (typeof copy.excludeHiddenFiles === 'boolean') {
result.excludeHiddenFiles = copy.excludeHiddenFiles;
- core_debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`);
+ lib_core/* debug */.Yz(`excludeHiddenFiles '${result.excludeHiddenFiles}'`);
}
}
return result;
@@ -40635,7 +40841,7 @@ class Pattern {
}
// Replace leading `~` segment
else if (pattern === '~' || pattern.startsWith(`~${external_path_.sep}`)) {
- homedir = homedir || external_os_namespaceObject.homedir();
+ homedir = homedir || external_os_.homedir();
external_assert_(homedir, 'Unable to determine HOME directory');
external_assert_(hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
@@ -40826,12 +41032,12 @@ class DefaultGlobber {
// Push the search paths
const stack = [];
for (const searchPath of getSearchPaths(patterns)) {
- core_debug(`Search path '${searchPath}'`);
+ lib_core/* debug */.Yz(`Search path '${searchPath}'`);
// Exists?
try {
// Intentionally using lstat. Detection for broken symlink
// will be performed later (if following symlinks).
- yield __await(external_fs_namespaceObject.promises.lstat(searchPath));
+ yield __await(external_fs_.promises.lstat(searchPath));
}
catch (err) {
if (err.code === 'ENOENT') {
@@ -40876,7 +41082,7 @@ class DefaultGlobber {
}
// Push the child items in reverse
const childLevel = item.level + 1;
- const childItems = (yield __await(external_fs_namespaceObject.promises.readdir(item.path))).map(x => new SearchState(external_path_.join(item.path, x), childLevel));
+ const childItems = (yield __await(external_fs_.promises.readdir(item.path))).map(x => new SearchState(external_path_.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
}
// File
@@ -40920,12 +41126,12 @@ class DefaultGlobber {
if (options.followSymbolicLinks) {
try {
// Use `stat` (following symlinks)
- stats = yield external_fs_namespaceObject.promises.stat(item.path);
+ stats = yield external_fs_.promises.stat(item.path);
}
catch (err) {
if (err.code === 'ENOENT') {
if (options.omitBrokenSymbolicLinks) {
- core_debug(`Broken symlink '${item.path}'`);
+ lib_core/* debug */.Yz(`Broken symlink '${item.path}'`);
return undefined;
}
throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
@@ -40935,19 +41141,19 @@ class DefaultGlobber {
}
else {
// Use `lstat` (not following symlinks)
- stats = yield external_fs_namespaceObject.promises.lstat(item.path);
+ stats = yield external_fs_.promises.lstat(item.path);
}
// Note, isDirectory() returns false for the lstat of a symlink
if (stats.isDirectory() && options.followSymbolicLinks) {
// Get the realpath
- const realPath = yield external_fs_namespaceObject.promises.realpath(item.path);
+ const realPath = yield external_fs_.promises.realpath(item.path);
// Fixup the traversal chain to match the item level
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
// Test for a cycle
if (traversalChain.some((x) => x === realPath)) {
- core_debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ lib_core/* debug */.Yz(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
return undefined;
}
// Update the traversal chain
@@ -41120,17 +41326,17 @@ async function resolveLocalBundle(pattern, workspaceDir) {
async function extractBundle(bundlePath, outputDir) {
const resolvedBundle = external_path_.resolve(bundlePath);
const resolvedOutput = external_path_.resolve(outputDir);
- if (!external_fs_namespaceObject.existsSync(resolvedBundle)) {
+ if (!external_fs_.existsSync(resolvedBundle)) {
throw new Error(`Bundle not found: ${bundlePath}`);
}
// Try apm unpack first (provides verification)
- const apmAvailable = await exec_exec('apm', ['--version'], {
+ const apmAvailable = await lib_exec/* exec */.m('apm', ['--version'], {
ignoreReturnCode: true,
silent: true,
}).catch(() => 1) === 0;
if (apmAvailable) {
- info('Using apm unpack (with verification)...');
- const rc = await exec_exec('apm', ['unpack', resolvedBundle, '-o', resolvedOutput], {
+ lib_core/* info */.pq('Using apm unpack (with verification)...');
+ const rc = await lib_exec/* exec */.m('apm', ['unpack', resolvedBundle, '-o', resolvedOutput], {
ignoreReturnCode: true,
});
if (rc !== 0) {
@@ -41147,8 +41353,8 @@ async function extractBundle(bundlePath, outputDir) {
// deployable output — the same files that `apm unpack` (the primary path)
// intentionally never copies. Leaking them into a git checkout dirties the
// workspace and breaks downstream `git checkout` steps. See microsoft/apm-action#26.
- info('APM not available — extracting with tar (no verification)...');
- const rc = await exec_exec('tar', [
+ lib_core/* info */.pq('APM not available — extracting with tar (no verification)...');
+ const rc = await lib_exec/* exec */.m('tar', [
'xzf', resolvedBundle,
'-C', resolvedOutput,
'--strip-components=1',
@@ -41177,8 +41383,8 @@ async function runPackStep(workingDir, opts) {
if (opts.archive) {
args.push('--archive');
}
- info(`Running: apm ${args.join(' ')}`);
- const rc = await exec_exec('apm', args, {
+ lib_core/* info */.pq(`Running: apm ${args.join(' ')}`);
+ const rc = await lib_exec/* exec */.m('apm', args, {
cwd: resolvedDir,
ignoreReturnCode: true,
env: { ...process.env },
@@ -41188,7 +41394,7 @@ async function runPackStep(workingDir, opts) {
}
// Find the produced bundle in build/
const bundlePath = findBundle(buildDir, opts.archive);
- info(`Bundle produced: ${bundlePath}`);
+ lib_core/* info */.pq(`Bundle produced: ${bundlePath}`);
return bundlePath;
}
/**
@@ -41197,10 +41403,10 @@ async function runPackStep(workingDir, opts) {
* For directories: look for non-hidden directories.
*/
function findBundle(buildDir, archive) {
- if (!external_fs_namespaceObject.existsSync(buildDir)) {
+ if (!external_fs_.existsSync(buildDir)) {
throw new Error(`Build directory not found: ${buildDir}`);
}
- const entries = external_fs_namespaceObject.readdirSync(buildDir);
+ const entries = external_fs_.readdirSync(buildDir);
if (archive) {
const archives = entries.filter(e => e.endsWith('.tar.gz')).sort();
if (archives.length === 0) {
@@ -41215,7 +41421,7 @@ function findBundle(buildDir, archive) {
const dirs = entries.filter(e => {
if (e.startsWith('.'))
return false;
- return external_fs_namespaceObject.statSync(external_path_.join(buildDir, e)).isDirectory();
+ return external_fs_.statSync(external_path_.join(buildDir, e)).isDirectory();
}).sort();
if (dirs.length === 0) {
throw new Error('No bundle directory found in build directory after apm pack');
@@ -41233,7 +41439,7 @@ function countDeployedFiles(rootDir) {
const claudeDir = external_path_.join(rootDir, '.claude');
let count = 0;
for (const dir of [githubDir, claudeDir]) {
- if (external_fs_namespaceObject.existsSync(dir)) {
+ if (external_fs_.existsSync(dir)) {
count += countFilesRecursive(dir);
}
}
@@ -41241,7 +41447,7 @@ function countDeployedFiles(rootDir) {
}
function countFilesRecursive(dir) {
let count = 0;
- for (const entry of external_fs_namespaceObject.readdirSync(dir, { withFileTypes: true })) {
+ for (const entry of external_fs_.readdirSync(dir, { withFileTypes: true })) {
if (entry.name.startsWith('.'))
continue;
const fullPath = external_path_.join(dir, entry.name);
@@ -41277,12 +41483,13 @@ function countFilesRecursive(dir) {
async function run() {
try {
// 0. Resolve working directory and read mode flags
- const workingDir = getInput('working-directory') || '.';
+ const workingDir = lib_core/* getInput */.V4('working-directory') || '.';
const resolvedDir = external_path_.resolve(workingDir);
- const bundleInput = getInput('bundle').trim();
- const packInput = getInput('pack') === 'true';
- const isolated = getInput('isolated') === 'true';
- const auditReportInput = getInput('audit-report').trim();
+ const bundleInput = lib_core/* getInput */.V4('bundle').trim();
+ const bundlesFileInput = lib_core/* getInput */.V4('bundles-file').trim();
+ const packInput = lib_core/* getInput */.V4('pack') === 'true';
+ const isolated = lib_core/* getInput */.V4('isolated') === 'true';
+ const auditReportInput = lib_core/* getInput */.V4('audit-report').trim();
// Pass github-token input to APM subprocess as GITHUB_TOKEN.
// GitHub Actions does not auto-export input values as env vars —
// without this, APM runs unauthenticated (rate-limited, no private repo access).
@@ -41297,9 +41504,9 @@ async function run() {
// auto-setting GITHUB_APM_PAT to the default github.token would shadow the
// caller's intentional GITHUB_TOKEN, causing auth failures for cross-org or
// private-repo access.
- const githubToken = getInput('github-token');
+ const githubToken = lib_core/* getInput */.V4('github-token');
if (githubToken) {
- core_setSecret(githubToken);
+ lib_core/* setSecret */.Pq(githubToken);
const callerProvidedToken = !!process.env.GITHUB_TOKEN;
if (!process.env.GITHUB_TOKEN) {
process.env.GITHUB_TOKEN = githubToken;
@@ -41308,9 +41515,15 @@ async function run() {
process.env.GITHUB_APM_PAT ??= githubToken;
}
}
- // Validate inputs before touching the filesystem.
- if (bundleInput && packInput) {
- throw new Error("'pack' and 'bundle' inputs are mutually exclusive");
+ // 3-way mutex: at most one of pack / bundle / bundles-file.
+ const modeFlags = [
+ packInput && 'pack',
+ bundleInput && 'bundle',
+ bundlesFileInput && 'bundles-file',
+ ].filter(Boolean);
+ if (modeFlags.length > 1) {
+ throw new Error(`inputs 'pack', 'bundle', and 'bundles-file' are mutually exclusive `
+ + `(got: ${modeFlags.join(', ')}). Pick exactly one mode per step.`);
}
// Directory creation contract:
// - isolated / pack / bundle (restore) modes: the action owns the workspace
@@ -41319,16 +41532,16 @@ async function run() {
// - non-isolated mode: the caller owns the project directory (which must
// contain apm.yml). If it doesn't exist, we fail fast with a clear message
// rather than silently creating an empty directory that would just fail later.
- const actionOwnsDir = isolated || packInput || !!bundleInput;
+ const actionOwnsDir = isolated || packInput || !!bundleInput || !!bundlesFileInput;
if (actionOwnsDir) {
- external_fs_namespaceObject.mkdirSync(resolvedDir, { recursive: true });
+ external_fs_.mkdirSync(resolvedDir, { recursive: true });
}
- else if (!external_fs_namespaceObject.existsSync(resolvedDir)) {
+ else if (!external_fs_.existsSync(resolvedDir)) {
throw new Error(`Working directory does not exist: ${resolvedDir}. ` +
'In non-isolated mode the directory must already contain your project (with apm.yml). ' +
'Use isolated: true if you want the action to create it automatically.');
}
- info(`Working directory: ${resolvedDir}`);
+ lib_core/* info */.pq(`Working directory: ${resolvedDir}`);
// Resolve audit report path
let auditReportPath;
if (auditReportInput) {
@@ -41363,7 +41576,7 @@ async function run() {
if (bundleInput) {
await ensureApmInstalled();
const bundlePath = await resolveLocalBundle(bundleInput, resolvedDir);
- info(`Restoring bundle: ${bundlePath}`);
+ lib_core/* info */.pq(`Restoring bundle: ${bundlePath}`);
const result = await extractBundle(bundlePath, resolvedDir);
// Restore mode now installs APM up-front, so the verified `apm unpack`
// path is the expected outcome. The unverified branch only runs if APM
@@ -41372,21 +41585,60 @@ async function run() {
const verifiedMsg = result.verified
? ' (verified)'
: ' (unverified — APM install did not complete; see earlier install logs)';
- info(`Restored ${result.files} file(s)${verifiedMsg}`);
+ lib_core/* info */.pq(`Restored ${result.files} file(s)${verifiedMsg}`);
const primitivesPath = external_path_.join(resolvedDir, '.github');
- setOutput('primitives-path', primitivesPath);
+ lib_core/* setOutput */.uH('primitives-path', primitivesPath);
// Run audit on unpacked bundle if report requested
if (auditReportPath) {
await runAuditReport(resolvedDir, auditReportPath);
}
- setOutput('success', 'true');
- info('APM action completed successfully (restore mode)');
+ lib_core/* setOutput */.uH('success', 'true');
+ lib_core/* info */.pq('APM action completed successfully (restore mode)');
+ return;
+ }
+ // MULTI-BUNDLE RESTORE MODE
+ if (bundlesFileInput) {
+ const { parseBundleListFile, previewBundleFiles, logCollisionPolicy, restoreMultiBundles, } = await __nccwpck_require__.e(/* import() */ 970).then(__nccwpck_require__.bind(__nccwpck_require__, 2970));
+ const bundles = parseBundleListFile(bundlesFileInput, {
+ workspaceDir: resolvedDir,
+ });
+ lib_core/* info */.pq(`Multi-bundle restore: ${bundles.length} bundle(s) from ${bundlesFileInput}`);
+ // Surface the collision policy BEFORE any work happens so users are
+ // never surprised by silent overwrites. Wired to previewBundleFiles
+ // so the call site is real today; per-file SHA collision detection
+ // ships in v1.6.0 (currently a no-op stub).
+ logCollisionPolicy(bundles.length);
+ const preview = await previewBundleFiles(bundles);
+ if (preview.differentSha.length > 0) {
+ lib_core/* warning */.$e(`Detected ${preview.differentSha.length} different-content collision(s) `
+ + `across bundles. Later bundles in the list will win.`);
+ }
+ if (preview.sameSha.length > 0) {
+ lib_core/* info */.pq(`Detected ${preview.sameSha.length} byte-identical file overlap(s) `
+ + `across bundles (benign duplicates).`);
+ }
+ // ensureApmInstalled() runs the install pipeline; restoreMultiBundles
+ // additionally probes `apm --version` as a defence-in-depth check so
+ // a transient install failure surfaces with a clear error before the
+ // first unpack rather than as a generic ENOENT mid-loop.
+ await ensureApmInstalled();
+ const result = await restoreMultiBundles(bundles, resolvedDir);
+ lib_core/* info */.pq(`Restored ${result.count} bundle(s) successfully into ${resolvedDir}`);
+ const primitivesPath = external_path_.join(resolvedDir, '.github');
+ lib_core/* setOutput */.uH('primitives-path', primitivesPath);
+ lib_core/* setOutput */.uH('bundles-restored', String(result.count));
+ // Run audit on merged workspace if requested
+ if (auditReportPath) {
+ await runAuditReport(resolvedDir, auditReportPath);
+ }
+ lib_core/* setOutput */.uH('success', 'true');
+ lib_core/* info */.pq('APM action completed successfully (multi-bundle restore mode)');
return;
}
// 1. Install APM CLI (install + pack modes)
await ensureApmInstalled();
// 2. Parse inputs
- const depsInput = getInput('dependencies').trim();
+ const depsInput = lib_core/* getInput */.V4('dependencies').trim();
// 3. Handle isolated mode: clear existing primitives, generate apm.yml from inline deps only.
// Directory was already created above (actionOwnsDir = true for isolated mode).
if (isolated) {
@@ -41402,7 +41654,7 @@ async function run() {
else {
// Default: install from apm.yml (if present), then add inline deps
const apmYmlPath = external_path_.join(resolvedDir, 'apm.yml');
- if (external_fs_namespaceObject.existsSync(apmYmlPath) || !depsInput) {
+ if (external_fs_.existsSync(apmYmlPath) || !depsInput) {
await runApm(['install'], resolvedDir);
}
// Install extra inline deps additively
@@ -41416,36 +41668,36 @@ async function run() {
await runAuditReport(resolvedDir, auditReportPath);
}
// 5. Run apm compile (opt-in)
- const compile = getInput('compile') === 'true';
+ const compile = lib_core/* getInput */.V4('compile') === 'true';
if (compile) {
- info('Compiling agent primitives...');
+ lib_core/* info */.pq('Compiling agent primitives...');
await runApm(['compile'], resolvedDir);
}
// 6. Verify deployment
const primitivesPath = external_path_.join(resolvedDir, '.github');
- info(`Primitives deployed to: ${primitivesPath}`);
- setOutput('primitives-path', primitivesPath);
+ lib_core/* info */.pq(`Primitives deployed to: ${primitivesPath}`);
+ lib_core/* setOutput */.uH('primitives-path', primitivesPath);
await listDeployed(primitivesPath);
// 7. Optionally run a script
- const script = getInput('script').trim();
+ const script = lib_core/* getInput */.V4('script').trim();
if (script) {
- info(`Running APM script: ${script}`);
+ lib_core/* info */.pq(`Running APM script: ${script}`);
await runApm(['run', script], resolvedDir);
}
// 8. Pack mode: produce bundle after install
if (packInput) {
- const target = getInput('target').trim() || undefined;
- const archive = getInput('archive') !== 'false';
+ const target = lib_core/* getInput */.V4('target').trim() || undefined;
+ const archive = lib_core/* getInput */.V4('archive') !== 'false';
const bundlePath = await runPackStep(resolvedDir, { target, archive });
- setOutput('bundle-path', bundlePath);
+ lib_core/* setOutput */.uH('bundle-path', bundlePath);
}
- setOutput('success', 'true');
- info('APM action completed successfully');
+ lib_core/* setOutput */.uH('success', 'true');
+ lib_core/* info */.pq('APM action completed successfully');
}
catch (error) {
const msg = error instanceof Error ? error.message : String(error);
- setOutput('success', 'false');
- setFailed(`APM action failed: ${msg}`);
+ lib_core/* setOutput */.uH('success', 'false');
+ lib_core/* setFailed */.C1(`APM action failed: ${msg}`);
}
}
/**
@@ -41454,36 +41706,36 @@ async function run() {
*/
async function runAuditReport(cwd, reportPath) {
// Check if apm is available (may not be in restore mode)
- const apmAvailable = await exec_exec('apm', ['--version'], {
+ const apmAvailable = await lib_exec/* exec */.m('apm', ['--version'], {
ignoreReturnCode: true,
silent: true,
}).catch(() => 1) === 0;
if (!apmAvailable) {
- warning('APM not installed — cannot generate audit report. '
+ lib_core/* warning */.$e('APM not installed — cannot generate audit report. '
+ 'Install APM for hidden-character audit coverage.');
return;
}
- info('Running content audit...');
- const auditRc = await exec_exec('apm', [
+ lib_core/* info */.pq('Running content audit...');
+ const auditRc = await lib_exec/* exec */.m('apm', [
'audit', '-f', 'sarif', '-o', reportPath,
], {
cwd,
ignoreReturnCode: true,
env: { ...process.env },
});
- if (external_fs_namespaceObject.existsSync(reportPath)) {
- setOutput('audit-report-path', reportPath);
- info(`Audit report generated: ${reportPath}`);
+ if (external_fs_.existsSync(reportPath)) {
+ lib_core/* setOutput */.uH('audit-report-path', reportPath);
+ lib_core/* info */.pq(`Audit report generated: ${reportPath}`);
}
if (auditRc === 1) {
- warning('APM audit found critical hidden-character findings — see SARIF report for details');
+ lib_core/* warning */.$e('APM audit found critical hidden-character findings — see SARIF report for details');
}
else if (auditRc === 2) {
- info('APM audit found warnings (non-critical) — see SARIF report for details');
+ lib_core/* info */.pq('APM audit found warnings (non-critical) — see SARIF report for details');
}
// Write markdown summary to $GITHUB_STEP_SUMMARY
try {
- const mdResult = await getExecOutput('apm', [
+ const mdResult = await lib_exec/* getExecOutput */.H('apm', [
'audit', '-f', 'markdown',
], {
cwd,
@@ -41491,7 +41743,7 @@ async function runAuditReport(cwd, reportPath) {
silent: true,
});
if (mdResult.stdout.trim()) {
- await summary
+ await lib_core/* summary */.z
 .addRaw('<details><summary>APM Audit Report</summary>\n\n')
.addRaw(mdResult.stdout)
 .addRaw('\n</details>')
@@ -41500,7 +41752,7 @@ async function runAuditReport(cwd, reportPath) {
}
catch {
// Markdown summary is best-effort — don't fail the action
- core_debug('Could not generate markdown audit summary');
+ lib_core/* debug */.Yz('Could not generate markdown audit summary');
}
}
/**
@@ -41539,7 +41791,7 @@ function parseDependencies(input) {
 * Install dependencies additively via `apm install <dep>`.
*/
async function installDeps(dir, deps) {
- info(`Installing ${deps.length} inline dependencies...`);
+ lib_core/* info */.pq(`Installing ${deps.length} inline dependencies...`);
for (const dep of deps) {
if (typeof dep === 'string') {
await runApm(['install', dep], dir);
@@ -41569,8 +41821,8 @@ function clearPrimitives(dir) {
const resolved = external_path_.resolve(dir);
const ghDir = external_path_.join(resolved, '.github');
// Nothing to clear — empty directory already satisfies isolated mode
- if (!external_fs_namespaceObject.existsSync(ghDir)) {
- info('No .github/ directory found — nothing to clear');
+ if (!external_fs_.existsSync(ghDir)) {
+ lib_core/* info */.pq('No .github/ directory found — nothing to clear');
return;
}
for (const sub of PRIMITIVE_DIRS) {
@@ -41580,9 +41832,9 @@ function clearPrimitives(dir) {
if (rel.startsWith('..') || external_path_.isAbsolute(rel)) {
throw new Error(`clearPrimitives: path traversal detected — "${subPath}" escapes working directory "${resolved}"`);
}
- if (external_fs_namespaceObject.existsSync(subPath)) {
- external_fs_namespaceObject.rmSync(subPath, { recursive: true });
- info(`Cleared .github/${sub}/`);
+ if (external_fs_.existsSync(subPath)) {
+ external_fs_.rmSync(subPath, { recursive: true });
+ lib_core/* info */.pq(`Cleared .github/${sub}/`);
}
}
}
@@ -41606,14 +41858,14 @@ function generateManifest(dir, deps) {
return entry;
});
const content = `name: inline-workflow\nversion: 1.0.0\ndependencies:\n apm:\n${depEntries.join('\n')}\n`;
- external_fs_namespaceObject.writeFileSync(apmYmlPath, content, 'utf-8');
- info(`Generated apm.yml with ${deps.length} dependencies (isolated mode)`);
+ external_fs_.writeFileSync(apmYmlPath, content, 'utf-8');
+ lib_core/* info */.pq(`Generated apm.yml with ${deps.length} dependencies (isolated mode)`);
}
/**
* Run an apm command in the given directory.
*/
async function runApm(args, cwd) {
- const rc = await exec_exec('apm', args, {
+ const rc = await lib_exec/* exec */.m('apm', args, {
cwd,
ignoreReturnCode: true,
env: { ...process.env },
@@ -41628,8 +41880,8 @@ async function runApm(args, cwd) {
* then per-file details.
*/
async function listDeployed(primitivesPath) {
- if (!external_fs_namespaceObject.existsSync(primitivesPath)) {
- info('No .github directory found after install — no primitives deployed');
+ if (!external_fs_.existsSync(primitivesPath)) {
+ lib_core/* info */.pq('No .github directory found after install — no primitives deployed');
return;
}
const subdirs = ['instructions', 'skills', 'agents', 'prompts'];
@@ -41637,21 +41889,21 @@ async function listDeployed(primitivesPath) {
let total = 0;
for (const sub of subdirs) {
const subPath = external_path_.join(primitivesPath, sub);
- if (external_fs_namespaceObject.existsSync(subPath)) {
- const files = external_fs_namespaceObject.readdirSync(subPath).filter(f => !f.startsWith('.'));
+ if (external_fs_.existsSync(subPath)) {
+ const files = external_fs_.readdirSync(subPath).filter(f => !f.startsWith('.'));
if (files.length > 0) {
counts[sub] = files;
total += files.length;
}
}
}
- const hasAgentsMd = external_fs_namespaceObject.existsSync(external_path_.join(primitivesPath, '..', 'AGENTS.md'));
+ const hasAgentsMd = external_fs_.existsSync(external_path_.join(primitivesPath, '..', 'AGENTS.md'));
if (total === 0) {
if (hasAgentsMd) {
- info('APM: no primitives deployed (AGENTS.md present)');
+ lib_core/* info */.pq('APM: no primitives deployed (AGENTS.md present)');
}
else {
- info('APM: no primitives deployed');
+ lib_core/* info */.pq('APM: no primitives deployed');
}
return;
}
@@ -41659,10 +41911,10 @@ async function listDeployed(primitivesPath) {
const breakdown = Object.entries(counts)
.map(([type, files]) => `${files.length} ${type}`)
.join(', ');
- info(`APM: ${total} primitives deployed (${breakdown})${hasAgentsMd ? ' + AGENTS.md' : ''}`);
+ lib_core/* info */.pq(`APM: ${total} primitives deployed (${breakdown})${hasAgentsMd ? ' + AGENTS.md' : ''}`);
// Per-file details (may get truncated — that's OK, headline has the key info)
for (const [sub, files] of Object.entries(counts)) {
- info(` ${sub}/: ${files.join(', ')}`);
+ lib_core/* info */.pq(` ${sub}/: ${files.join(', ')}`);
}
}
diff --git a/dist/multibundle.d.ts b/dist/multibundle.d.ts
new file mode 100644
index 0000000..b3d4015
--- /dev/null
+++ b/dist/multibundle.d.ts
@@ -0,0 +1,113 @@
+/**
+ * Env-var denylist stripped from the apm unpack subprocess (B7).
+ *
+ * Includes:
+ * - APM-recognised credentials: GITHUB_APM_PAT, ADO_APM_PAT.
+ * - GitHub CLI / Actions token aliases that APM may auto-detect now or in
+ * future releases: GITHUB_TOKEN, GH_TOKEN.
+ * - Runner-scoped tokens with high blast radius if exfiltrated by a malicious
+ * bundle's hypothetical lifecycle hook: ACTIONS_RUNTIME_TOKEN (cache write),
+ * ACTIONS_ID_TOKEN_REQUEST_TOKEN (OIDC federation).
+ *
+ * Defence-in-depth: `apm unpack` itself does not need any of these, and the
+ * restore-side multi-bundle path performs no authenticated network calls.
+ */
+export declare const TOKEN_ENV_DENYLIST: readonly string[];
+/** Default cap on the number of bundles a single list file may contain (B5). */
+export declare const DEFAULT_MAX_BUNDLES = 64;
+/** Options for parsing a bundle list file. */
+export interface ParseOptions {
+ /**
+ * Maximum number of bundles allowed.
+ * Defaults to APM_MAX_BUNDLES env var, then DEFAULT_MAX_BUNDLES (64).
+ */
+ maxBundles?: number;
+ /**
+ * Directory to resolve relative paths against.
+ * Defaults to GITHUB_WORKSPACE or cwd.
+ */
+ workspaceDir?: string;
+}
+/** A single collision between two bundles deploying the same target file. */
+export interface FileCollision {
+ /** Relative target path inside the workspace (e.g. ".github/skills/foo/SKILL.md"). */
+ targetPath: string;
+ /** Absolute path of the bundle that was overwritten (earlier in list). */
+ overwrittenBundle: string;
+ /** Absolute path of the bundle that won (later in list). */
+ winningBundle: string;
+}
+/** Collision report from a multi-bundle preview or restore. */
+export interface CollisionReport {
+ /** Files deployed by multiple bundles with byte-identical content. */
+ sameSha: FileCollision[];
+ /** Files deployed by multiple bundles with DIFFERENT content (last wins). */
+ differentSha: FileCollision[];
+}
+/** Result of a multi-bundle restore operation. */
+export interface RestoreResult {
+ /** Number of bundles successfully restored. */
+ count: number;
+ /** Collision report (populated during restore). */
+ collisions: CollisionReport;
+}
+/**
+ * Build a sanitised env for the apm unpack subprocess: process.env with the
+ * token denylist removed. Defence-in-depth so a malicious bundle's lifecycle
+ * hooks (if any are ever introduced) cannot exfiltrate the runner's auth.
+ */
+export declare function buildStrippedEnv(): Record<string, string>;
+/**
+ * Parse a newline-separated bundle list file into validated, deduped paths.
+ *
+ * Rules:
+ * - File must exist and be readable (hard error with path + cwd).
+ * - UTF-8 only (hard error on decode failure).
+ * - Lines starting with '#' are comments (skipped).
+ * - Blank lines are skipped.
+ * - '..' segment in any path -> reject with line number (B3).
+ * - Relative paths resolved against opts.workspaceDir; rejected if they escape it (B1).
+ * - Absolute paths allowed (matches existing bundle: behaviour, B1).
+ * - Each entry must end in `.tar.gz` (defence-in-depth + clear early failure
+ * if a user accidentally points at a directory or wrong file). Glob patterns
+ * are NOT expanded; use `find ... | sort` to generate the list yourself.
+ * - Empty list after stripping -> hard error.
+ * - Duplicates deduped silently (first occurrence wins).
+ * - Cap at opts.maxBundles (default 64, env APM_MAX_BUNDLES) (B5).
+ */
+export declare function parseBundleListFile(filePath: string, opts?: ParseOptions): string[];
+/**
+ * Preview file collisions across N bundles without extracting.
+ *
+ * NOTE: Stubbed for v1.5.0 -- returns an empty CollisionReport. Full
+ * implementation (which would shell out to `apm unpack --dry-run` and
+ * aggregate file lists across bundles, distinguishing same-SHA from
+ * different-SHA overlaps) is planned for v1.6.0. The restore loop is NOT
+ * blocked on this; the policy is documented up-front via
+ * `logCollisionPolicy()` so users are not surprised by silent overwrites.
+ *
+ * The function is wired into the runner today so its call site is real,
+ * not dead code -- the v1.6.0 follow-up only swaps the implementation.
+ */
+export declare function previewBundleFiles(bundles: string[]): Promise<CollisionReport>;
+/**
+ * Emit a single, explicit policy banner BEFORE the restore loop runs so the
+ * user is never surprised by silent overwrites. No-op for the single-bundle
+ * case (no possible collisions). Intentionally `core.warning` not `core.info`
+ * so it is annotated visibly in the GitHub Actions summary.
+ */
+export declare function logCollisionPolicy(bundleCount: number): void;
+/**
+ * Restore N bundles into the same workspace directory, in caller-specified order.
+ *
+ * - Verifies `apm` is on PATH (B4: hard fail, no fallback).
+ * - Loops through bundles in order, calling `apm unpack <bundle> -o <outputDir>`.
+ * - Subprocess env has GITHUB_APM_PAT, ADO_APM_PAT, GITHUB_TOKEN stripped (B7).
+ * - Subprocess uses argv array, not shell string (B8).
+ * - Fail-fast: if bundle K fails, throw with index K, path, and stderr.
+ * - Returns count + empty CollisionReport (collision detection deferred).
+ *
+ * @param bundles Ordered array of absolute bundle paths (from parseBundleListFile).
+ * @param outputDir Workspace directory to restore into.
+ */
+export declare function restoreMultiBundles(bundles: string[], outputDir: string): Promise<RestoreResult>;
diff --git a/src/__tests__/multibundle.test.ts b/src/__tests__/multibundle.test.ts
new file mode 100644
index 0000000..02533dd
--- /dev/null
+++ b/src/__tests__/multibundle.test.ts
@@ -0,0 +1,400 @@
+import { jest, describe, it, expect, beforeEach, afterEach } from '@jest/globals';
+import fs from 'node:fs';
+import os from 'node:os';
+import path from 'node:path';
+
+// ESM mocking: set up mocks before dynamic imports.
+const mockExec = jest.fn<(cmd: string, args?: string[], options?: object) => Promise<number>>();
+const mockInfo = jest.fn();
+const mockDebug = jest.fn();
+const mockWarning = jest.fn();
+
+jest.unstable_mockModule('@actions/core', () => ({
+ info: mockInfo,
+ debug: mockDebug,
+ warning: mockWarning,
+}));
+
+jest.unstable_mockModule('@actions/exec', () => ({
+ exec: mockExec,
+}));
+
+const {
+ parseBundleListFile,
+ restoreMultiBundles,
+ previewBundleFiles,
+ logCollisionPolicy,
+ buildStrippedEnv,
+ TOKEN_ENV_DENYLIST,
+ DEFAULT_MAX_BUNDLES,
+} = await import('../multibundle.js');
+
+// ---------------------------------------------------------------------------
+// parseBundleListFile
+// ---------------------------------------------------------------------------
+
+describe('parseBundleListFile', () => {
+ let tmpDir: string;
+ let workspaceDir: string;
+ let listFile: string;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'apm-mb-parse-'));
+ workspaceDir = fs.mkdtempSync(path.join(os.tmpdir(), 'apm-mb-ws-'));
+ listFile = path.join(tmpDir, 'bundles.txt');
+ });
+
+ afterEach(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ fs.rmSync(workspaceDir, { recursive: true, force: true });
+ delete process.env.APM_MAX_BUNDLES;
+ });
+
+ it('parses a valid 3-entry list into absolute paths', () => {
+ // Pre-create the bundles inside workspace so the relative resolution lands
+ // somewhere predictable. The parser does not check existence of bundles
+ // themselves -- only of the list file.
+ const a = path.join(workspaceDir, 'a.tar.gz');
+ const b = path.join(workspaceDir, 'b.tar.gz');
+ const c = path.join(workspaceDir, 'c.tar.gz');
+ fs.writeFileSync(listFile, [a, b, c].join('\n'));
+
+ const out = parseBundleListFile(listFile, { workspaceDir });
+ expect(out).toEqual([a, b, c]);
+ });
+
+ it('strips lines starting with #', () => {
+ const a = path.join(workspaceDir, 'a.tar.gz');
+ const b = path.join(workspaceDir, 'b.tar.gz');
+ fs.writeFileSync(listFile, [
+ '# comment line',
+ a,
+ '# another comment',
+ b,
+ ].join('\n'));
+
+ const out = parseBundleListFile(listFile, { workspaceDir });
+ expect(out).toEqual([a, b]);
+ });
+
+ it('strips blank lines and trims whitespace', () => {
+ const a = path.join(workspaceDir, 'a.tar.gz');
+ const b = path.join(workspaceDir, 'b.tar.gz');
+ fs.writeFileSync(listFile, [
+ '',
+ ` ${a} `,
+ '\t',
+ `\t${b}`,
+ '',
+ ].join('\n'));
+
+ const out = parseBundleListFile(listFile, { workspaceDir });
+ expect(out).toEqual([a, b]);
+ });
+
+ it('deduplicates preserving first occurrence', () => {
+ const a = path.join(workspaceDir, 'a.tar.gz');
+ const b = path.join(workspaceDir, 'b.tar.gz');
+ fs.writeFileSync(listFile, [a, b, a, b, a].join('\n'));
+
+ const out = parseBundleListFile(listFile, { workspaceDir });
+ expect(out).toEqual([a, b]);
+ });
+
+ it('[B3] rejects ".." segment with line number in error', () => {
+ fs.writeFileSync(listFile, [
+ path.join(workspaceDir, 'ok.tar.gz'),
+ '/tmp/bundles/../../../etc/passwd',
+ ].join('\n'));
+
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/line 2: rejected '\.\.' segment/);
+ });
+
+ it('[B1] rejects relative path escaping workspace', () => {
+ fs.writeFileSync(listFile, 'subdir/../ok.tar.gz\n');
+ // The '..' check fires first per the rule order; assert traversal is rejected.
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/line 1: rejected '\.\.' segment/);
+ });
+
+ it('[B1] allows absolute paths outside workspace', () => {
+ // gh-aw scenario: bundles downloaded to /tmp/, workspace in /home/runner/work/...
+ const outside = path.resolve(tmpDir, 'outside.tar.gz');
+ fs.writeFileSync(listFile, outside + '\n');
+
+ const out = parseBundleListFile(listFile, { workspaceDir });
+ expect(out).toEqual([outside]);
+ });
+
+ it('[B2] throws on non-UTF-8 file content', () => {
+ // Lone 0xFF / 0xFE bytes are invalid UTF-8 leading bytes.
+ fs.writeFileSync(listFile, Buffer.from([0xff, 0xfe, 0x00, 0x41]));
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/not valid UTF-8/);
+ });
+
+ it('[B5] throws when list exceeds default cap of 64', () => {
+ const lines: string[] = [];
+ for (let i = 0; i < DEFAULT_MAX_BUNDLES + 1; i++) {
+ lines.push(path.join(workspaceDir, `b${i}.tar.gz`));
+ }
+ fs.writeFileSync(listFile, lines.join('\n'));
+
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(`bundles-file contains 65 bundles (max 64)`);
+ });
+
+ it('[B5] respects APM_MAX_BUNDLES env override', () => {
+ process.env.APM_MAX_BUNDLES = '2';
+ const lines = [
+ path.join(workspaceDir, 'a.tar.gz'),
+ path.join(workspaceDir, 'b.tar.gz'),
+ path.join(workspaceDir, 'c.tar.gz'),
+ ];
+ fs.writeFileSync(listFile, lines.join('\n'));
+
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/contains 3 bundles \(max 2\)/);
+ });
+
+ it('throws when file does not exist with path and cwd', () => {
+ const missing = path.join(tmpDir, 'nope.txt');
+ expect(() => parseBundleListFile(missing, { workspaceDir }))
+ .toThrow(/bundles-file not found.*cwd:/);
+ });
+
+ it('throws when list is empty after stripping', () => {
+ fs.writeFileSync(listFile, '# only comments\n\n \n# more\n');
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/empty after stripping/);
+ });
+
+ it('rejects entries that do not end in .tar.gz with line number', () => {
+ const ok = path.join(workspaceDir, 'ok.tar.gz');
+ fs.writeFileSync(listFile, [ok, 'bundle.zip'].join('\n'));
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/line 2: entry must end in '\.tar\.gz'.*bundle\.zip/);
+ });
+
+ it("rejects glob patterns left unexpanded (no shell expansion)", () => {
+ fs.writeFileSync(listFile, '/tmp/bundles/*.tar.gz\n');
+ // The glob is not a literal .tar.gz file path either (the workspace check
+ // on a literal '*' character is tolerated; the extension check would pass
+ // since the suffix is .tar.gz). Globs that DON'T end in .tar.gz are caught
+ // here; literal '*'-suffix paths are caught at unpack time by the OS.
+ // This test pins the wildcard-without-extension case which is the common
+ // user mistake (e.g. '/tmp/bundles/*').
+ fs.writeFileSync(listFile, '/tmp/bundles/*\n');
+ expect(() => parseBundleListFile(listFile, { workspaceDir }))
+ .toThrow(/entry must end in '\.tar\.gz'/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// restoreMultiBundles
+// ---------------------------------------------------------------------------
+
+describe('restoreMultiBundles', () => {
+ let outDir: string;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ outDir = fs.mkdtempSync(path.join(os.tmpdir(), 'apm-mb-out-'));
+ // Default: apm --version succeeds, all unpack invocations succeed.
+ mockExec.mockImplementation(async (cmd, args) => {
+ if (cmd === 'apm' && args?.[0] === '--version') return 0;
+ if (cmd === 'apm' && args?.[0] === 'unpack') return 0;
+ return 1;
+ });
+ });
+
+ afterEach(() => {
+ fs.rmSync(outDir, { recursive: true, force: true });
+ });
+
+ it('calls apm unpack per bundle in caller order', async () => {
+ const bundles = ['/abs/a.tar.gz', '/abs/b.tar.gz', '/abs/c.tar.gz'];
+ const result = await restoreMultiBundles(bundles, outDir);
+
+ expect(result.count).toBe(3);
+ expect(result.collisions).toEqual({ sameSha: [], differentSha: [] });
+
+ const unpackCalls = mockExec.mock.calls.filter(
+ c => c[0] === 'apm' && c[1]?.[0] === 'unpack',
+ );
+ expect(unpackCalls).toHaveLength(3);
+ expect(unpackCalls[0][1]).toEqual(['unpack', '/abs/a.tar.gz', '-o', path.resolve(outDir)]);
+ expect(unpackCalls[1][1]).toEqual(['unpack', '/abs/b.tar.gz', '-o', path.resolve(outDir)]);
+ expect(unpackCalls[2][1]).toEqual(['unpack', '/abs/c.tar.gz', '-o', path.resolve(outDir)]);
+ });
+
+ it('[B7] subprocess env excludes all entries in TOKEN_ENV_DENYLIST', async () => {
+ // Set every denylisted token in the parent env so we can prove they are
+ // ALL stripped (not just the original three). This guards against future
+ // additions to the denylist quietly regressing.
+    const prev: Record<string, string | undefined> = {};
+ for (const key of TOKEN_ENV_DENYLIST) {
+ prev[key] = process.env[key];
+ process.env[key] = `parent-${key}`;
+ }
+
+ try {
+ await restoreMultiBundles(['/abs/a.tar.gz'], outDir);
+
+ const unpack = mockExec.mock.calls.find(
+ c => c[0] === 'apm' && c[1]?.[0] === 'unpack',
+ );
+ expect(unpack).toBeTruthy();
+      const opts = unpack![2] as { env?: Record<string, string> };
+ expect(opts?.env).toBeDefined();
+ for (const key of TOKEN_ENV_DENYLIST) {
+ expect(opts.env![key]).toBeUndefined();
+ }
+ } finally {
+ for (const key of TOKEN_ENV_DENYLIST) {
+ if (prev[key] === undefined) delete process.env[key];
+ else process.env[key] = prev[key];
+ }
+ }
+ });
+
+ it('[B8] invokes apm via argv array, not shell', async () => {
+ await restoreMultiBundles(['/abs/a.tar.gz'], outDir);
+ const unpack = mockExec.mock.calls.find(
+ c => c[0] === 'apm' && c[1]?.[0] === 'unpack',
+ );
+ expect(unpack).toBeTruthy();
+ // argv array form: cmd is exactly 'apm' (not a shell string), args is an array.
+ expect(unpack![0]).toBe('apm');
+ expect(Array.isArray(unpack![1])).toBe(true);
+ });
+
+ it('[B4] hard-fails if apm is not on PATH', async () => {
+ mockExec.mockImplementation(async (cmd, args) => {
+ if (cmd === 'apm' && args?.[0] === '--version') return 1;
+ return 0;
+ });
+
+ await expect(restoreMultiBundles(['/abs/a.tar.gz'], outDir))
+ .rejects.toThrow(/apm CLI not found on PATH/);
+ });
+
+ it('fail-fast: stops at first failing bundle with index in message', async () => {
+ let unpackIdx = 0;
+ mockExec.mockImplementation(async (cmd, args) => {
+ if (cmd === 'apm' && args?.[0] === '--version') return 0;
+ if (cmd === 'apm' && args?.[0] === 'unpack') {
+ unpackIdx++;
+ return unpackIdx === 2 ? 7 : 0;
+ }
+ return 1;
+ });
+
+ const bundles = ['/abs/a.tar.gz', '/abs/b.tar.gz', '/abs/c.tar.gz'];
+ await expect(restoreMultiBundles(bundles, outDir))
+ .rejects.toThrow(/bundle 2 of 3.*\/abs\/b\.tar\.gz.*exit code: 7/s);
+
+ const unpackCalls = mockExec.mock.calls.filter(
+ c => c[0] === 'apm' && c[1]?.[0] === 'unpack',
+ );
+ // Only 2 unpack calls -- third bundle never attempted.
+ expect(unpackCalls).toHaveLength(2);
+ });
+
+ it('[B9] does not reorder bundles', async () => {
+ const bundles = ['/z.tar.gz', '/a.tar.gz', '/m.tar.gz'];
+ await restoreMultiBundles(bundles, outDir);
+ const order = mockExec.mock.calls
+ .filter(c => c[0] === 'apm' && c[1]?.[0] === 'unpack')
+ .map(c => c[1]![1]);
+ expect(order).toEqual(['/z.tar.gz', '/a.tar.gz', '/m.tar.gz']);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// previewBundleFiles
+// ---------------------------------------------------------------------------
+
+describe('previewBundleFiles', () => {
+ it('returns empty CollisionReport (stub for v1.5.0)', async () => {
+ const report = await previewBundleFiles(['/a.tar.gz', '/b.tar.gz']);
+ expect(report).toEqual({ sameSha: [], differentSha: [] });
+ });
+});
+
+// ---------------------------------------------------------------------------
+// logCollisionPolicy
+// ---------------------------------------------------------------------------
+
+describe('logCollisionPolicy', () => {
+ beforeEach(() => {
+ mockWarning.mockClear();
+ });
+
+ it('emits no warning when bundleCount <= 1 (no possible collisions)', () => {
+ logCollisionPolicy(0);
+ logCollisionPolicy(1);
+ expect(mockWarning).not.toHaveBeenCalled();
+ });
+
+ it('emits exactly one warning naming the bundle count when N > 1', () => {
+ logCollisionPolicy(3);
+ expect(mockWarning).toHaveBeenCalledTimes(1);
+ const msg = mockWarning.mock.calls[0][0] as string;
+ expect(msg).toContain('3 bundles');
+ expect(msg).toContain('list order');
+ expect(msg).toContain('overwrite');
+ });
+});
+
+// ---------------------------------------------------------------------------
+// buildStrippedEnv
+// ---------------------------------------------------------------------------
+
+describe('buildStrippedEnv', () => {
+ it('[B7] deletes every entry in TOKEN_ENV_DENYLIST and includes the new tokens', () => {
+ // Pin the explicit set so future additions to the denylist either extend
+ // this assertion or trip a clear test failure.
+ expect(TOKEN_ENV_DENYLIST).toEqual(
+ expect.arrayContaining([
+ 'GITHUB_APM_PAT',
+ 'ADO_APM_PAT',
+ 'GITHUB_TOKEN',
+ 'GH_TOKEN',
+ 'ACTIONS_RUNTIME_TOKEN',
+ 'ACTIONS_ID_TOKEN_REQUEST_TOKEN',
+ ]),
+ );
+
+    const prev: Record<string, string | undefined> = {};
+ for (const key of TOKEN_ENV_DENYLIST) {
+ prev[key] = process.env[key];
+ process.env[key] = `set-${key}`;
+ }
+
+ try {
+ const env = buildStrippedEnv();
+ for (const key of TOKEN_ENV_DENYLIST) {
+ expect(env[key]).toBeUndefined();
+ }
+ } finally {
+ for (const key of TOKEN_ENV_DENYLIST) {
+ if (prev[key] === undefined) delete process.env[key];
+ else process.env[key] = prev[key];
+ }
+ }
+ });
+
+ it('preserves PATH and other env vars', () => {
+ process.env.MULTIBUNDLE_TEST_VAR = 'preserve-me';
+ try {
+ const env = buildStrippedEnv();
+ expect(env.PATH).toBe(process.env.PATH);
+ expect(env.MULTIBUNDLE_TEST_VAR).toBe('preserve-me');
+ } finally {
+ delete process.env.MULTIBUNDLE_TEST_VAR;
+ }
+ });
+});
diff --git a/src/__tests__/runner.test.ts b/src/__tests__/runner.test.ts
index f594cb5..23a2c7b 100644
--- a/src/__tests__/runner.test.ts
+++ b/src/__tests__/runner.test.ts
@@ -706,3 +706,129 @@ describe('run (restore mode)', () => {
expect(installOrder).toBeLessThan(extractOrder);
});
});
+
+// ---------------------------------------------------------------------------
+// 3-way mutex: pack / bundle / bundles-file
+// ---------------------------------------------------------------------------
+//
+// Existing `mockGetInput.mockImplementation` switch blocks already fall through
+// to `default: return ''` so they handle the new `'bundles-file'` input
+// transparently with no edits to the existing tests.
+
+describe('3-way mutex (pack / bundle / bundles-file)', () => {
+ let tmpDir: string;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'apm-action-mutex-'));
+ mockEnsureApmInstalled.mockResolvedValue(undefined);
+ mockExec.mockResolvedValue(0);
+ mockGetExecOutput.mockResolvedValue({ exitCode: 0, stdout: '', stderr: '' });
+ mockResolveLocalBundle.mockImplementation(async () => path.join(tmpDir, 'bundle.tar.gz'));
+ mockExtractBundle.mockResolvedValue({ files: 5, verified: true });
+ });
+
+ afterEach(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ });
+
+  function inputs(over: Partial<Record<string, string>>): (name: unknown) => string {
+    const base: Record<string, string> = {
+ 'working-directory': tmpDir,
+ dependencies: '',
+ isolated: 'false',
+ bundle: '',
+ 'bundles-file': '',
+ pack: 'false',
+ compile: 'false',
+ script: '',
+ 'audit-report': '',
+ target: '',
+ archive: 'true',
+ };
+ const merged = { ...base, ...over };
+ return (name: unknown) => merged[name as string] ?? '';
+ }
+
+ it('rejects pack + bundle', async () => {
+ mockGetInput.mockImplementation(inputs({ pack: 'true', bundle: './x.tar.gz' }));
+ await run();
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('mutually exclusive'),
+ );
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('pack, bundle'),
+ );
+ });
+
+ it('rejects pack + bundles-file', async () => {
+ mockGetInput.mockImplementation(inputs({ pack: 'true', 'bundles-file': '/tmp/list.txt' }));
+ await run();
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('mutually exclusive'),
+ );
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('pack, bundles-file'),
+ );
+ });
+
+ it('rejects bundle + bundles-file', async () => {
+ mockGetInput.mockImplementation(inputs({ bundle: './x.tar.gz', 'bundles-file': '/tmp/list.txt' }));
+ await run();
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('mutually exclusive'),
+ );
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('bundle, bundles-file'),
+ );
+ });
+
+ it('rejects all three', async () => {
+ mockGetInput.mockImplementation(inputs({
+ pack: 'true', bundle: './x.tar.gz', 'bundles-file': '/tmp/list.txt',
+ }));
+ await run();
+ expect(mockSetFailed).toHaveBeenCalledWith(
+ expect.stringContaining('pack, bundle, bundles-file'),
+ );
+ });
+
+ it('allows pack alone', async () => {
+ fs.writeFileSync(path.join(tmpDir, 'apm.yml'), 'name: t\nversion: 1.0.0\n');
+ fs.mkdirSync(path.join(tmpDir, 'build'), { recursive: true });
+ fs.writeFileSync(path.join(tmpDir, 'build', 'pkg-1.0.0.tar.gz'), 'fake');
+ mockRunPackStep.mockResolvedValue(path.join(tmpDir, 'build', 'pkg-1.0.0.tar.gz'));
+
+ mockGetInput.mockImplementation(inputs({ pack: 'true' }));
+ await run();
+ expect(mockSetFailed).not.toHaveBeenCalled();
+ });
+
+ it('allows bundle alone', async () => {
+ mockGetInput.mockImplementation(inputs({ bundle: './x.tar.gz' }));
+ await run();
+ expect(mockSetFailed).not.toHaveBeenCalled();
+ });
+
+ it('allows bundles-file alone', async () => {
+ // Create a real list file with one bundle path -- parseBundleListFile uses
+ // real fs. The mocked @actions/exec returns 0 for both `apm --version`
+ // and `apm unpack`, so the multi-bundle branch completes successfully.
+ const listFile = path.join(tmpDir, 'bundles.txt');
+ fs.writeFileSync(listFile, '/abs/some-bundle.tar.gz\n');
+
+ mockGetInput.mockImplementation(inputs({ 'bundles-file': listFile }));
+ await run();
+
+ expect(mockSetFailed).not.toHaveBeenCalled();
+ expect(mockSetOutput).toHaveBeenCalledWith('bundles-restored', '1');
+ });
+
+ it('allows none (default install mode)', async () => {
+ fs.writeFileSync(path.join(tmpDir, 'apm.yml'), 'name: t\nversion: 1.0.0\n');
+ mockGetInput.mockImplementation(inputs({}));
+ await run();
+ expect(mockSetFailed).not.toHaveBeenCalled();
+ });
+});
+
diff --git a/src/multibundle.ts b/src/multibundle.ts
new file mode 100644
index 0000000..ca6b9d9
--- /dev/null
+++ b/src/multibundle.ts
@@ -0,0 +1,317 @@
+// Gap #1 resolution: `apm unpack --dry-run` IS available in the installed apm CLI
+// (verified via `apm unpack --help` during Phase 2). However, full collision
+// detection across N bundles is deferred to a follow-up PR per the design plan;
+// `previewBundleFiles` is therefore stubbed to return an empty CollisionReport.
+import * as core from '@actions/core';
+import * as exec from '@actions/exec';
+import * as fs from 'fs';
+import * as path from 'path';
+
+/**
+ * Env-var denylist stripped from the apm unpack subprocess (B7).
+ *
+ * Includes:
+ * - APM-recognised credentials: GITHUB_APM_PAT, ADO_APM_PAT.
+ * - GitHub CLI / Actions token aliases that APM may auto-detect now or in
+ * future releases: GITHUB_TOKEN, GH_TOKEN.
+ * - Runner-scoped tokens with high blast radius if exfiltrated by a malicious
+ * bundle's hypothetical lifecycle hook: ACTIONS_RUNTIME_TOKEN (cache write),
+ * ACTIONS_ID_TOKEN_REQUEST_TOKEN (OIDC federation).
+ *
+ * Defence-in-depth: `apm unpack` itself does not need any of these, and the
+ * restore-side multi-bundle path performs no authenticated network calls.
+ */
+export const TOKEN_ENV_DENYLIST: readonly string[] = [
+ 'GITHUB_APM_PAT',
+ 'ADO_APM_PAT',
+ 'GITHUB_TOKEN',
+ 'GH_TOKEN',
+ 'ACTIONS_RUNTIME_TOKEN',
+ 'ACTIONS_ID_TOKEN_REQUEST_TOKEN',
+];
+
+/** Default cap on the number of bundles a single list file may contain (B5). */
+export const DEFAULT_MAX_BUNDLES = 64;
+
+/** Options for parsing a bundle list file. */
+export interface ParseOptions {
+ /**
+ * Maximum number of bundles allowed.
+ * Defaults to APM_MAX_BUNDLES env var, then DEFAULT_MAX_BUNDLES (64).
+ */
+ maxBundles?: number;
+ /**
+ * Directory to resolve relative paths against.
+ * Defaults to GITHUB_WORKSPACE or cwd.
+ */
+ workspaceDir?: string;
+}
+
+/** A single collision between two bundles deploying the same target file. */
+export interface FileCollision {
+ /** Relative target path inside the workspace (e.g. ".github/skills/foo/SKILL.md"). */
+ targetPath: string;
+ /** Absolute path of the bundle that was overwritten (earlier in list). */
+ overwrittenBundle: string;
+ /** Absolute path of the bundle that won (later in list). */
+ winningBundle: string;
+}
+
+/** Collision report from a multi-bundle preview or restore. */
+export interface CollisionReport {
+ /** Files deployed by multiple bundles with byte-identical content. */
+ sameSha: FileCollision[];
+ /** Files deployed by multiple bundles with DIFFERENT content (last wins). */
+ differentSha: FileCollision[];
+}
+
+/** Result of a multi-bundle restore operation. */
+export interface RestoreResult {
+ /** Number of bundles successfully restored. */
+ count: number;
+ /** Collision report (populated during restore). */
+ collisions: CollisionReport;
+}
+
+/**
+ * Build a sanitised env for the apm unpack subprocess: process.env with the
+ * token denylist removed. Defence-in-depth so a malicious bundle's lifecycle
+ * hooks (if any are ever introduced) cannot exfiltrate the runner's auth.
+ */
+export function buildStrippedEnv(): Record<string, string> {
+  // process.env is Record<string, string | undefined>. Filter undefined-valued
+  // entries up-front so the returned record is genuinely Record<string, string>
+  // without an unsafe `as` cast that hides the underlying type mismatch.
+  const env: Record<string, string> = Object.fromEntries(
+ Object.entries(process.env).filter(
+ (entry): entry is [string, string] => entry[1] !== undefined,
+ ),
+ );
+ for (const key of TOKEN_ENV_DENYLIST) {
+ delete env[key];
+ }
+ return env;
+}
+
+/**
+ * Parse a newline-separated bundle list file into validated, deduped paths.
+ *
+ * Rules:
+ * - File must exist and be readable (hard error with path + cwd).
+ * - UTF-8 only (hard error on decode failure).
+ * - Lines starting with '#' are comments (skipped).
+ * - Blank lines are skipped.
+ * - '..' segment in any path -> reject with line number (B3).
+ * - Relative paths resolved against opts.workspaceDir; rejected if they escape it (B1).
+ * - Absolute paths allowed (matches existing bundle: behaviour, B1).
+ * - Each entry must end in `.tar.gz` (defence-in-depth + clear early failure
+ * if a user accidentally points at a directory or wrong file). Glob patterns
+ * are NOT expanded; use `find ... | sort` to generate the list yourself.
+ * - Empty list after stripping -> hard error.
+ * - Duplicates deduped silently (first occurrence wins).
+ * - Cap at opts.maxBundles (default 64, env APM_MAX_BUNDLES) (B5).
+ */
+export function parseBundleListFile(filePath: string, opts?: ParseOptions): string[] {
+ const cwd = process.cwd();
+ const resolvedListPath = path.isAbsolute(filePath) ? filePath : path.resolve(cwd, filePath);
+
+ if (!fs.existsSync(resolvedListPath)) {
+ throw new Error(
+ `bundles-file not found: ${filePath} (resolved: ${resolvedListPath}, cwd: ${cwd})`,
+ );
+ }
+
+ // Read as Buffer first so we can validate UTF-8 (B2).
+ let raw: Buffer;
+ try {
+ raw = fs.readFileSync(resolvedListPath);
+ } catch (e) {
+ const msg = e instanceof Error ? e.message : String(e);
+ throw new Error(`bundles-file unreadable: ${resolvedListPath}: ${msg}`);
+ }
+
+ // Strict UTF-8 decode using TextDecoder with fatal: true.
+ let content: string;
+ try {
+ content = new TextDecoder('utf-8', { fatal: true }).decode(raw);
+ } catch {
+ throw new Error(
+ `bundles-file is not valid UTF-8: ${resolvedListPath}`,
+ );
+ }
+
+ const workspaceDir = opts?.workspaceDir
+ ?? process.env.GITHUB_WORKSPACE
+ ?? cwd;
+ const resolvedWorkspace = path.resolve(workspaceDir);
+
+  const envCap = parseInt(process.env.APM_MAX_BUNDLES || '', 10);
+  // Explicit opts.maxBundles wins; the env var is only the fallback default
+  // (matches the ParseOptions.maxBundles doc contract above).
+  const maxBundles = opts?.maxBundles
+    ?? (Number.isFinite(envCap) && envCap > 0 ? envCap : DEFAULT_MAX_BUNDLES);
+
+ const lines = content.split(/\r?\n/);
+  const seen = new Set<string>();
+ const result: string[] = [];
+
+ for (let i = 0; i < lines.length; i++) {
+ const lineNum = i + 1;
+ const trimmed = lines[i].trim();
+ if (!trimmed) continue;
+ if (trimmed.startsWith('#')) continue;
+
+ // Reject any '..' segment before resolving (B3). Normalise both '/' and '\'.
+ const segments = trimmed.split(/[\\/]+/);
+ if (segments.some(seg => seg === '..')) {
+ throw new Error(
+ `bundles-file line ${lineNum}: rejected '..' segment in path: ${trimmed}`,
+ );
+ }
+
+ // Require .tar.gz extension. Globs are not expanded; bare paths only.
+ // Catches mis-configured list files (typo, directory, or wildcard left
+ // unexpanded) at parse time rather than surfacing as a confusing tar error.
+ if (!trimmed.toLowerCase().endsWith('.tar.gz')) {
+ throw new Error(
+ `bundles-file line ${lineNum}: entry must end in '.tar.gz' `
+ + `(globs are not expanded; use find or ls to generate the list): ${trimmed}`,
+ );
+ }
+
+ const isAbs = path.isAbsolute(trimmed);
+ const resolved = isAbs ? path.resolve(trimmed) : path.resolve(resolvedWorkspace, trimmed);
+
+ // Workspace escape check (B1) -- relative paths only. Absolute paths are
+ // user-explicit and allowed outside the workspace (mirrors bundler.ts).
+ if (!isAbs) {
+ const rel = path.relative(resolvedWorkspace, resolved);
+ if (rel.startsWith('..') || path.isAbsolute(rel)) {
+ throw new Error(
+ `bundles-file line ${lineNum}: relative path escapes workspace ${resolvedWorkspace}: ${trimmed}`,
+ );
+ }
+ }
+
+ if (seen.has(resolved)) continue;
+ seen.add(resolved);
+ result.push(resolved);
+ }
+
+ if (result.length === 0) {
+ throw new Error(
+ `bundles-file is empty after stripping comments and blank lines: ${resolvedListPath}`,
+ );
+ }
+ if (result.length > maxBundles) {
+ throw new Error(
+ `bundles-file contains ${result.length} bundles (max ${maxBundles})`,
+ );
+ }
+
+ return result;
+}
+
+/**
+ * Preview file collisions across N bundles without extracting.
+ *
+ * NOTE: Stubbed for v1.5.0 -- returns an empty CollisionReport. Full
+ * implementation (which would shell out to `apm unpack --dry-run` and
+ * aggregate file lists across bundles, distinguishing same-SHA from
+ * different-SHA overlaps) is planned for v1.6.0. The restore loop is NOT
+ * blocked on this; the policy is documented up-front via
+ * `logCollisionPolicy()` so users are not surprised by silent overwrites.
+ *
+ * The function is wired into the runner today so its call site is real,
+ * not dead code -- the v1.6.0 follow-up only swaps the implementation.
+ */
+export async function previewBundleFiles(
+ bundles: string[],
+): Promise<CollisionReport> {
+ void bundles;
+ core.debug('previewBundleFiles: dry-run aggregation not yet implemented; returning empty report');
+ return { sameSha: [], differentSha: [] };
+}
+
+/**
+ * Emit a single, explicit policy banner BEFORE the restore loop runs so the
+ * user is never surprised by silent overwrites. No-op for the single-bundle
+ * case (no possible collisions). Intentionally `core.warning` not `core.info`
+ * so it is annotated visibly in the GitHub Actions summary.
+ */
+export function logCollisionPolicy(bundleCount: number): void {
+ if (bundleCount <= 1) return;
+ core.warning(
+ `Multi-bundle restore: ${bundleCount} bundles will be applied in list order. `
+ + `On file conflicts, later bundles overwrite earlier bundles silently. `
+ + `Per-file SHA collision detection is planned for v1.6.0. `
+ + `Until then, ensure the bundle list is in your intended precedence order.`,
+ );
+}
+
+/**
+ * Restore N bundles into the same workspace directory, in caller-specified order.
+ *
+ * - Verifies `apm` is on PATH (B4: hard fail, no fallback).
+ * - Loops through bundles in order, calling `apm unpack -o `.
+ * - Subprocess env has GITHUB_APM_PAT, ADO_APM_PAT, GITHUB_TOKEN stripped (B7).
+ * - Subprocess uses argv array, not shell string (B8).
+ * - Fail-fast: if bundle K fails, throw with index K, path, and stderr.
+ * - Returns count + empty CollisionReport (collision detection deferred).
+ *
+ * @param bundles Ordered array of absolute bundle paths (from parseBundleListFile).
+ * @param outputDir Workspace directory to restore into.
+ */
+export async function restoreMultiBundles(
+ bundles: string[],
+ outputDir: string,
+): Promise<RestoreResult> {
+ // B4: hard-fail if apm is not on PATH. Caller is expected to have invoked
+ // ensureApmInstalled() already; this is a defensive check, not a fallback.
+ const apmAvailable = await exec.exec('apm', ['--version'], {
+ ignoreReturnCode: true,
+ silent: true,
+ }).catch(() => 1) === 0;
+
+ if (!apmAvailable) {
+ throw new Error(
+ 'apm CLI not found on PATH. Multi-bundle restore requires APM to be installed; '
+ + 'ensure ensureApmInstalled() ran before restoreMultiBundles().',
+ );
+ }
+
+ const resolvedOutput = path.resolve(outputDir);
+ const env = buildStrippedEnv();
+ const total = bundles.length;
+
+ for (let i = 0; i < total; i++) {
+ const bundle = bundles[i];
+ const human = `bundle ${i + 1} of ${total}`;
+ core.info(`[${human}] Unpacking: ${bundle}`);
+
+ let stderr = '';
+ const rc = await exec.exec('apm', ['unpack', bundle, '-o', resolvedOutput], {
+ ignoreReturnCode: true,
+ env,
+ listeners: {
+ stderr: (data: Buffer) => { stderr += data.toString(); },
+ },
+ });
+
+ if (rc !== 0) {
+ const tail = stderr.trim().split(/\r?\n/).slice(-10).join('\n');
+ throw new Error(
+ `apm unpack failed for ${human} (path: ${bundle}, exit code: ${rc})`
+ + (tail ? `\nstderr:\n${tail}` : ''),
+ );
+ }
+
+ // Per-bundle confirmation so a stalled run is debuggable from the log
+ // alone without re-reading the surrounding 'Unpacking' lines.
+ core.info(`[${human}] OK`);
+ }
+
+ return {
+ count: total,
+ collisions: { sameSha: [], differentSha: [] },
+ };
+}
diff --git a/src/runner.ts b/src/runner.ts
index 1ca57c7..ab6cb46 100644
--- a/src/runner.ts
+++ b/src/runner.ts
@@ -23,6 +23,7 @@ export async function run(): Promise<void> {
const workingDir = core.getInput('working-directory') || '.';
const resolvedDir = path.resolve(workingDir);
const bundleInput = core.getInput('bundle').trim();
+ const bundlesFileInput = core.getInput('bundles-file').trim();
const packInput = core.getInput('pack') === 'true';
const isolated = core.getInput('isolated') === 'true';
const auditReportInput = core.getInput('audit-report').trim();
@@ -53,9 +54,17 @@ export async function run(): Promise {
}
}
- // Validate inputs before touching the filesystem.
- if (bundleInput && packInput) {
- throw new Error("'pack' and 'bundle' inputs are mutually exclusive");
+ // 3-way mutex: at most one of pack / bundle / bundles-file.
+ const modeFlags = [
+ packInput && 'pack',
+ bundleInput && 'bundle',
+ bundlesFileInput && 'bundles-file',
+ ].filter(Boolean) as string[];
+ if (modeFlags.length > 1) {
+ throw new Error(
+ `inputs 'pack', 'bundle', and 'bundles-file' are mutually exclusive `
+ + `(got: ${modeFlags.join(', ')}). Pick exactly one mode per step.`,
+ );
}
// Directory creation contract:
@@ -65,7 +74,7 @@ export async function run(): Promise {
// - non-isolated mode: the caller owns the project directory (which must
// contain apm.yml). If it doesn't exist, we fail fast with a clear message
// rather than silently creating an empty directory that would just fail later.
- const actionOwnsDir = isolated || packInput || !!bundleInput;
+ const actionOwnsDir = isolated || packInput || !!bundleInput || !!bundlesFileInput;
if (actionOwnsDir) {
fs.mkdirSync(resolvedDir, { recursive: true });
} else if (!fs.existsSync(resolvedDir)) {
@@ -136,6 +145,64 @@ export async function run(): Promise {
return;
}
+ // MULTI-BUNDLE RESTORE MODE
+ if (bundlesFileInput) {
+ const {
+ parseBundleListFile,
+ previewBundleFiles,
+ logCollisionPolicy,
+ restoreMultiBundles,
+ } = await import('./multibundle.js');
+
+ const bundles = parseBundleListFile(bundlesFileInput, {
+ workspaceDir: resolvedDir,
+ });
+ core.info(`Multi-bundle restore: ${bundles.length} bundle(s) from ${bundlesFileInput}`);
+
+ // Surface the collision policy BEFORE any work happens so users are
+ // never surprised by silent overwrites. Wired to previewBundleFiles
+ // so the call site is real today; per-file SHA collision detection
+ // ships in v1.6.0 (currently a no-op stub).
+ logCollisionPolicy(bundles.length);
+ const preview = await previewBundleFiles(bundles);
+ if (preview.differentSha.length > 0) {
+ core.warning(
+ `Detected ${preview.differentSha.length} different-content collision(s) `
+ + `across bundles. Later bundles in the list will win.`,
+ );
+ }
+ if (preview.sameSha.length > 0) {
+ core.info(
+ `Detected ${preview.sameSha.length} byte-identical file overlap(s) `
+ + `across bundles (benign duplicates).`,
+ );
+ }
+
+ // ensureApmInstalled() runs the install pipeline; restoreMultiBundles
+ // additionally probes `apm --version` as a defence-in-depth check so
+ // a transient install failure surfaces with a clear error before the
+ // first unpack rather than as a generic ENOENT mid-loop.
+ await ensureApmInstalled();
+ const result = await restoreMultiBundles(bundles, resolvedDir);
+
+ core.info(
+ `Restored ${result.count} bundle(s) successfully into ${resolvedDir}`,
+ );
+
+ const primitivesPath = path.join(resolvedDir, '.github');
+ core.setOutput('primitives-path', primitivesPath);
+ core.setOutput('bundles-restored', String(result.count));
+
+ // Run audit on merged workspace if requested
+ if (auditReportPath) {
+ await runAuditReport(resolvedDir, auditReportPath);
+ }
+
+ core.setOutput('success', 'true');
+ core.info('APM action completed successfully (multi-bundle restore mode)');
+ return;
+ }
+
// 1. Install APM CLI (install + pack modes)
await ensureApmInstalled();