diff --git a/.changeset/patch-js-apm-unpack-restore.md b/.changeset/patch-js-apm-unpack-restore.md
new file mode 100644
index 00000000000..39888876f7e
--- /dev/null
+++ b/.changeset/patch-js-apm-unpack-restore.md
@@ -0,0 +1,5 @@
+---
+"gh-aw": patch
+---
+
+Implement JavaScript-based APM bundle restore in agent jobs, replacing the `microsoft/apm-action` restore step while keeping pack behavior unchanged.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ec412261851..74da5e50e71 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -953,6 +953,149 @@ jobs:
echo "✨ Live API test completed successfully" >> $GITHUB_STEP_SUMMARY
fi
+ js-apm-unpack-integration:
+ name: APM Pack/Unpack Integration (Python vs JS)
+ runs-on: ubuntu-latest
+ timeout-minutes: 15
+ needs: validate-yaml
+ permissions:
+ contents: read
+ concurrency:
+ group: ci-${{ github.ref }}-js-apm-unpack-integration
+ cancel-in-progress: true
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+
+ - name: Set up Python
+ uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
+ with:
+ python-version: "3.12"
+
+ - name: Install APM CLI
+ run: pip install --quiet apm-cli
+
+ - name: Set up Node.js
+ id: setup-node
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
+ with:
+ node-version: "24"
+ cache: npm
+ cache-dependency-path: actions/setup/js/package-lock.json
+
+ - name: Install npm dependencies
+ run: cd actions/setup/js && npm ci
+
+ - name: Create minimal APM test project
+ run: |
+ set -e
+ APM_PROJECT=/tmp/apm-test-project
+ mkdir -p "$APM_PROJECT"
+ cd "$APM_PROJECT"
+
+ # apm.yml — required by the packer for name/version
+ cat > apm.yml << 'APMEOF'
+ name: gh-aw-test-package
+ version: 1.0.0
+ APMEOF
+
+ # apm.lock.yaml — two dependencies, mixed files and a directory entry
+ cat > apm.lock.yaml << 'APMEOF'
+ lockfile_version: '1'
+ apm_version: '0.8.5'
+ dependencies:
+ - repo_url: https://github.com/test-owner/skill-a
+ resolved_commit: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+ resolved_ref: main
+ virtual_path: null
+ is_local: false
+ deployed_files:
+ - .github/skills/skill-a/
+ - .github/copilot-instructions.md
+ - repo_url: https://github.com/test-owner/skill-b
+ resolved_commit: bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+ resolved_ref: v2.0.0
+ virtual_path: null
+ is_local: false
+ deployed_files:
+ - .github/skills/skill-b/skill.md
+ - .github/agents.md
+ APMEOF
+
+ # Create files referenced by deployed_files
+ mkdir -p .github/skills/skill-a
+ printf '# Skill A\nThis is skill A content.\n' > .github/skills/skill-a/skill.md
+ printf 'Skill A helper notes.\n' > .github/skills/skill-a/notes.txt
+ printf '# Copilot Instructions\nFollow these rules.\n' > .github/copilot-instructions.md
+ mkdir -p .github/skills/skill-b
+ printf '# Skill B\nThis is skill B.\n' > .github/skills/skill-b/skill.md
+ printf '# Agents\nAgent configuration.\n' > .github/agents.md
+
+ echo "✅ APM test project created at $APM_PROJECT"
+ find "$APM_PROJECT" -type f | sort
+
+ - name: Pack APM bundle
+ run: |
+ set -e
+ cd /tmp/apm-test-project
+ mkdir -p /tmp/apm-bundle
+ apm pack --archive -o /tmp/apm-bundle
+ echo ""
+ echo "✅ Bundle created:"
+ ls -lh /tmp/apm-bundle/*.tar.gz
+
+ - name: Unpack with Python (microsoft/apm reference)
+ run: |
+ set -e
+ mkdir -p /tmp/apm-out-python
+ BUNDLE=$(ls /tmp/apm-bundle/*.tar.gz)
+ apm unpack "$BUNDLE" -o /tmp/apm-out-python
+ echo ""
+ echo "=== Python unpack result ==="
+ find /tmp/apm-out-python -type f | sort
+
+ - name: Unpack with JavaScript (apm_unpack.cjs)
+ env:
+ APM_BUNDLE_DIR: /tmp/apm-bundle
+ OUTPUT_DIR: /tmp/apm-out-js
+ run: |
+ set -e
+ mkdir -p /tmp/apm-out-js
+ node actions/setup/js/run_apm_unpack.cjs
+ echo ""
+ echo "=== JavaScript unpack result ==="
+ find /tmp/apm-out-js -type f | sort
+
+ - name: Compare Python vs JavaScript unpack outputs
+ run: |
+ set -e
+ echo "## APM Unpack Integration Test" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+
+ echo "### Files unpacked by Python (reference)" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ find /tmp/apm-out-python -type f | sort | sed "s|/tmp/apm-out-python/||" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+
+ echo "### Files unpacked by JavaScript" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ find /tmp/apm-out-js -type f | sort | sed "s|/tmp/apm-out-js/||" >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+
+ if diff -rq /tmp/apm-out-python /tmp/apm-out-js > /tmp/apm-diff.txt 2>&1; then
+ echo "### ✅ Outputs are identical" >> $GITHUB_STEP_SUMMARY
+ echo "✅ Python and JavaScript unpack results match"
+ else
+ echo "### ❌ Outputs differ" >> $GITHUB_STEP_SUMMARY
+ echo '```diff' >> $GITHUB_STEP_SUMMARY
+ diff -r /tmp/apm-out-python /tmp/apm-out-js >> $GITHUB_STEP_SUMMARY 2>&1 || true
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ echo "❌ Python and JavaScript unpack results differ:"
+ cat /tmp/apm-diff.txt
+ diff -r /tmp/apm-out-python /tmp/apm-out-js || true
+ exit 1
+ fi
+
bench:
# Only run benchmarks on main branch for performance tracking
if: github.ref == 'refs/heads/main'
diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml
index 60ca3938dfa..c488ae6c6bc 100644
--- a/.github/workflows/smoke-claude.lock.yml
+++ b/.github/workflows/smoke-claude.lock.yml
@@ -808,10 +808,15 @@ jobs:
name: apm
path: /tmp/gh-aw/apm-bundle
- name: Restore APM dependencies
- uses: microsoft/apm-action@cc84c04bc73e19e35527f1efa34ea003be9f037f # v1.4.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ env:
+ APM_BUNDLE_DIR: /tmp/gh-aw/apm-bundle
with:
- bundle: /tmp/gh-aw/apm-bundle/*.tar.gz
- apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/apm_unpack.cjs');
+ await main();
- name: Determine automatic lockdown mode for GitHub MCP Server
id: determine-automatic-lockdown
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
diff --git a/actions/setup/js/apm_unpack.cjs b/actions/setup/js/apm_unpack.cjs
new file mode 100644
index 00000000000..360e0f574f0
--- /dev/null
+++ b/actions/setup/js/apm_unpack.cjs
@@ -0,0 +1,829 @@
+// @ts-check
+///
+
+/**
+ * APM Bundle Unpacker
+ *
+ * JavaScript implementation of the APM (Agent Package Manager) bundle unpack
+ * algorithm, equivalent to microsoft/apm unpacker.py.
+ *
+ * This module extracts and deploys an APM bundle (tar.gz archive) to the
+ * GitHub Actions workspace. It replaces the `microsoft/apm-action` restore
+ * step in the agent job, removing the external dependency for unpacking.
+ *
+ * Algorithm (mirrors unpacker.py):
+ * 1. Locate the tar.gz bundle in APM_BUNDLE_DIR
+ * 2. Extract to a temporary directory (with path-traversal / symlink guards)
+ * 3. Locate the single top-level directory inside the extracted archive
+ * 4. Read apm.lock.yaml from the bundle
+ * 5. Collect the deduplicated deployed_files list from all dependencies
+ * 6. Verify that every listed file actually exists in the bundle
+ * 7. Copy files (additive, never deletes) to OUTPUT_DIR
+ * 8. Clean up the temporary directory
+ *
+ * Environment variables:
+ * APM_BUNDLE_DIR – directory containing the *.tar.gz bundle
+ * (default: /tmp/gh-aw/apm-bundle)
+ * OUTPUT_DIR – destination directory for deployed files
+ * (default: GITHUB_WORKSPACE, then process.cwd())
+ *
+ * @module apm_unpack
+ */
+
+const fs = require("fs");
+const path = require("path");
+const os = require("os");
+
+/** Lockfile filename used by current APM versions. */
+const LOCKFILE_NAME = "apm.lock.yaml";
+
+// ---------------------------------------------------------------------------
+// YAML parser
+// ---------------------------------------------------------------------------
+
+/**
+ * Unquote a YAML scalar value produced by PyYAML's safe_dump.
+ *
+ * Handles:
+ * - single-quoted strings: 'value'
+ * - double-quoted strings: "value"
+ * - null / ~ literals
+ * - boolean literals: true / false
+ * - integers
+ * - bare strings (returned as-is)
+ *
+ * @param {string} raw
+ * @returns {string | number | boolean | null}
+ */
+function unquoteYaml(raw) {
+ if (raw === undefined || raw === null) return null;
+ const s = raw.trim();
+ if (s === "" || s === "~" || s === "null") return null;
+ if (s === "true") return true;
+ if (s === "false") return false;
+ if (/^-?\d+$/.test(s)) return parseInt(s, 10);
+ if (/^-?\d+\.\d+$/.test(s)) return parseFloat(s);
+ // Strip surrounding quotes
+ if ((s.startsWith("'") && s.endsWith("'")) || (s.startsWith('"') && s.endsWith('"'))) {
+ return s.slice(1, -1);
+ }
+ return s;
+}
+
+/**
+ * @typedef {Object} LockedDependency
+ * @property {string} repo_url
+ * @property {string | null} host
+ * @property {string | null} resolved_commit
+ * @property {string | null} resolved_ref
+ * @property {string | null} version
+ * @property {string | null} virtual_path
+ * @property {boolean} is_virtual
+ * @property {number} depth
+ * @property {string | null} resolved_by
+ * @property {string | null} package_type
+ * @property {string[]} deployed_files
+ * @property {string | null} source
+ * @property {string | null} local_path
+ * @property {string | null} content_hash
+ * @property {boolean} is_dev
+ */
+
+/**
+ * @typedef {Object} APMLockfile
+ * @property {string | null} lockfile_version
+ * @property {string | null} generated_at
+ * @property {string | null} apm_version
+ * @property {LockedDependency[]} dependencies
+ * @property {Record<string, string | number | boolean | null>} pack
+ */
+
+/**
+ * Parse an APM lockfile (apm.lock.yaml) from a YAML string.
+ *
+ * This is a targeted parser for the specific output produced by PyYAML's
+ * safe_dump (default_flow_style=False, sort_keys=False). The format is:
+ *
+ * lockfile_version: '1' <- top-level scalar
+ * dependencies: <- top-level sequence key
+ * - repo_url: https://... <- first key of a mapping item
+ * deployed_files: <- nested sequence key (2-space indent)
+ * - .github/skills/foo/ <- sequence items (2-space indent)
+ * pack: <- top-level mapping key
+ * target: claude <- nested scalars (2-space indent)
+ *
+ * @param {string} content - Raw YAML string content of the lockfile.
+ * @returns {APMLockfile}
+ */
+function parseAPMLockfile(content) {
+ /** @type {APMLockfile} */
+ const result = {
+ lockfile_version: null,
+ generated_at: null,
+ apm_version: null,
+ dependencies: [],
+ pack: {},
+ };
+
+ const lines = content.split("\n");
+
+ // Parser states
+ const STATE_TOP = "top";
+ const STATE_DEPS = "dependencies";
+ const STATE_DEP_ITEM = "dep_item";
+ const STATE_DEPLOYED_FILES = "deployed_files";
+ const STATE_PACK = "pack";
+
+ let state = STATE_TOP;
+ /** @type {LockedDependency | null} */
+ let currentDep = null;
+
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+
+ // Skip blank lines and YAML comments
+ if (!line.trim() || line.trim().startsWith("#")) continue;
+
+ switch (state) {
+ case STATE_TOP: {
+ if (line === "dependencies:") {
+ state = STATE_DEPS;
+ break;
+ }
+ if (line === "pack:" || line.startsWith("pack: ")) {
+ // pack may be a mapping block ("pack:") or an inline scalar ("pack: value")
+ if (line === "pack:") {
+ state = STATE_PACK;
+ } else {
+ const v = line.slice("pack:".length).trim();
+ if (v) result.pack["_value"] = unquoteYaml(v);
+ }
+ break;
+ }
+ // Top-level scalar: key: value
+ const topMatch = line.match(/^([\w-]+):\s*(.*)$/);
+ if (topMatch) {
+ const k = topMatch[1];
+ const v = unquoteYaml(topMatch[2]);
+ // @ts-ignore – dynamic key assignment on typed result
+ result[k] = v;
+ }
+ break;
+ }
+
+ case STATE_DEPS: {
+ if (line.startsWith("- ")) {
+ // New dependency mapping item – save previous if any
+ if (currentDep) result.dependencies.push(currentDep);
+ currentDep = makeEmptyDep();
+ state = STATE_DEP_ITEM;
+ // The first key is on the same line as "- "
+ const m = line.match(/^- ([\w-]+):\s*(.*)$/);
+ if (m) assignDepField(currentDep, m[1], unquoteYaml(m[2]));
+ break;
+ }
+ // Exiting dependencies section (non-indented, non-list line)
+ if (!line.startsWith(" ")) {
+ if (currentDep) {
+ result.dependencies.push(currentDep);
+ currentDep = null;
+ }
+ state = STATE_TOP;
+ i--; // re-process this line
+ }
+ break;
+ }
+
+ case STATE_DEP_ITEM: {
+ if (line.startsWith("- ")) {
+ // Start of the next dependency item
+ if (currentDep) result.dependencies.push(currentDep);
+ currentDep = makeEmptyDep();
+ const m = line.match(/^- ([\w-]+):\s*(.*)$/);
+ if (m) assignDepField(currentDep, m[1], unquoteYaml(m[2]));
+ break;
+ }
+ // 2-space indented key inside the mapping
+ const depKeyMatch = line.match(/^ ([\w-]+):\s*(.*)$/);
+ if (depKeyMatch) {
+ const k = depKeyMatch[1];
+ if (k === "deployed_files") {
+ state = STATE_DEPLOYED_FILES;
+ } else {
+ if (currentDep) assignDepField(currentDep, k, unquoteYaml(depKeyMatch[2]));
+ }
+ break;
+ }
+ // Exiting dependencies section
+ if (!line.startsWith(" ")) {
+ if (currentDep) {
+ result.dependencies.push(currentDep);
+ currentDep = null;
+ }
+ state = STATE_TOP;
+ i--;
+ }
+ break;
+ }
+
+ case STATE_DEPLOYED_FILES: {
+ // deployed_files list items are at 2-space indent: " - path"
+ const fileMatch = line.match(/^ - (.+)$/);
+ if (fileMatch) {
+ if (currentDep) currentDep.deployed_files.push(String(unquoteYaml(String(fileMatch[1].trim()))));
+ break;
+ }
+ // Any other 2-space key: back to dep_item
+ if (line.match(/^ [\w-]+:/)) {
+ state = STATE_DEP_ITEM;
+ i--; // re-process
+ break;
+ }
+ // New dependency item
+ if (line.startsWith("- ")) {
+ if (currentDep) result.dependencies.push(currentDep);
+ currentDep = makeEmptyDep();
+ state = STATE_DEP_ITEM;
+ const m = line.match(/^- ([\w-]+):\s*(.*)$/);
+ if (m) assignDepField(currentDep, m[1], unquoteYaml(m[2]));
+ break;
+ }
+ // Exiting dependencies
+ if (!line.startsWith(" ")) {
+ if (currentDep) {
+ result.dependencies.push(currentDep);
+ currentDep = null;
+ }
+ state = STATE_TOP;
+ i--;
+ }
+ break;
+ }
+
+ case STATE_PACK: {
+ const packKeyMatch = line.match(/^ ([\w-]+):\s*(.*)$/);
+ if (packKeyMatch) {
+ result.pack[packKeyMatch[1]] = unquoteYaml(packKeyMatch[2]);
+ break;
+ }
+ // Exiting pack mapping
+ if (!line.startsWith(" ")) {
+ state = STATE_TOP;
+ i--;
+ }
+ break;
+ }
+ }
+ }
+
+ // Flush the last dependency
+ if (currentDep) result.dependencies.push(currentDep);
+
+ return result;
+}
+
+/**
+ * @returns {LockedDependency}
+ */
+function makeEmptyDep() {
+ return {
+ repo_url: "",
+ host: null,
+ resolved_commit: null,
+ resolved_ref: null,
+ version: null,
+ virtual_path: null,
+ is_virtual: false,
+ depth: 1,
+ resolved_by: null,
+ package_type: null,
+ deployed_files: [],
+ source: null,
+ local_path: null,
+ content_hash: null,
+ is_dev: false,
+ };
+}
+
+/**
+ * Assign a parsed YAML field to a LockedDependency object.
+ * @param {LockedDependency} dep
+ * @param {string} key
+ * @param {string | number | boolean | null} value
+ */
+function assignDepField(dep, key, value) {
+ switch (key) {
+ case "repo_url":
+ dep.repo_url = String(value ?? "");
+ break;
+ case "host":
+ dep.host = value !== null ? String(value) : null;
+ break;
+ case "resolved_commit":
+ dep.resolved_commit = value !== null ? String(value) : null;
+ break;
+ case "resolved_ref":
+ dep.resolved_ref = value !== null ? String(value) : null;
+ break;
+ case "version":
+ dep.version = value !== null ? String(value) : null;
+ break;
+ case "virtual_path":
+ dep.virtual_path = value !== null ? String(value) : null;
+ break;
+ case "is_virtual":
+ dep.is_virtual = value === true || value === "true";
+ break;
+ case "depth":
+ dep.depth = typeof value === "number" ? value : parseInt(String(value ?? "1"), 10);
+ break;
+ case "resolved_by":
+ dep.resolved_by = value !== null ? String(value) : null;
+ break;
+ case "package_type":
+ dep.package_type = value !== null ? String(value) : null;
+ break;
+ case "source":
+ dep.source = value !== null ? String(value) : null;
+ break;
+ case "local_path":
+ dep.local_path = value !== null ? String(value) : null;
+ break;
+ case "content_hash":
+ dep.content_hash = value !== null ? String(value) : null;
+ break;
+ case "is_dev":
+ dep.is_dev = value === true || value === "true";
+ break;
+ default:
+ // Unknown field – ignore silently
+ break;
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Bundle location helpers
+// ---------------------------------------------------------------------------
+
+/**
+ * Find the first *.tar.gz file in the given directory.
+ *
+ * @param {string} bundleDir - Directory that contains the bundle archive.
+ * @returns {string} Absolute path to the tar.gz file.
+ * @throws {Error} If no bundle file is found.
+ */
+function findBundleFile(bundleDir) {
+ core.info(`[APM Unpack] Scanning bundle directory: ${bundleDir}`);
+
+ if (!fs.existsSync(bundleDir)) {
+ throw new Error(`APM bundle directory not found: ${bundleDir}`);
+ }
+
+ const entries = fs.readdirSync(bundleDir);
+ core.info(`[APM Unpack] Found ${entries.length} entries in bundle directory: ${entries.join(", ")}`);
+
+ const tarGzFiles = entries.filter(e => e.endsWith(".tar.gz"));
+ if (tarGzFiles.length === 0) {
+ throw new Error(`No *.tar.gz bundle found in ${bundleDir}. ` + `Contents: ${entries.length === 0 ? "(empty)" : entries.join(", ")}`);
+ }
+ if (tarGzFiles.length > 1) {
+ core.warning(`[APM Unpack] Multiple bundles found in ${bundleDir}: ${tarGzFiles.join(", ")}. ` + `Using the first one: ${tarGzFiles[0]}`);
+ }
+
+ const bundlePath = path.join(bundleDir, tarGzFiles[0]);
+ core.info(`[APM Unpack] Selected bundle: ${bundlePath}`);
+ return bundlePath;
+}
+
+/**
+ * After extracting the tar.gz, locate the inner content directory.
+ *
+ * The APM packer creates archives with a single top-level directory
+ * (e.g. "my-package-1.2.3/") that wraps all bundle contents.
+ * If no such single directory exists, the extraction root is returned.
+ *
+ * @param {string} extractedDir - Root of the extracted archive.
+ * @returns {string} Path to the source directory containing apm.lock.yaml.
+ */
+function findSourceDir(extractedDir) {
+ const entries = fs.readdirSync(extractedDir, { withFileTypes: true });
+ const dirs = entries.filter(e => e.isDirectory() && !e.isSymbolicLink());
+
+ if (dirs.length === 1 && entries.length === 1) {
+ // Single top-level directory: this is the bundle root
+ const sourceDir = path.join(extractedDir, dirs[0].name);
+ core.info(`[APM Unpack] Bundle root directory: ${sourceDir}`);
+ return sourceDir;
+ }
+
+ // Multiple entries or no subdirectory: use extractedDir itself
+ core.info(`[APM Unpack] No single top-level directory found (${entries.length} entries). ` + `Using extracted root: ${extractedDir}`);
+ return extractedDir;
+}
+
+/**
+ * Locate the lockfile inside the source directory.
+ *
+ * @param {string} sourceDir
+ * @returns {string} Absolute path to the lockfile.
+ * @throws {Error} If the lockfile is not found.
+ */
+function findLockfile(sourceDir) {
+ const primary = path.join(sourceDir, LOCKFILE_NAME);
+ if (fs.existsSync(primary)) {
+ core.info(`[APM Unpack] Found lockfile: ${primary}`);
+ return primary;
+ }
+ // List source dir for debugging
+ const entries = fs.readdirSync(sourceDir).join(", ");
+ throw new Error(`${LOCKFILE_NAME} not found in bundle. ` + `Source directory (${sourceDir}) contains: ${entries || "(empty)"}`);
+}
+
+// ---------------------------------------------------------------------------
+// File collection and verification
+// ---------------------------------------------------------------------------
+
+/**
+ * Walk all dependencies in the lockfile and return a deduplicated, ordered list
+ * of deployed_files paths together with a per-dependency map.
+ *
+ * Mirrors the Python unpacker's collection loop:
+ * for dep in lockfile.get_all_dependencies():
+ * for f in dep.deployed_files:
+ * ...unique_files.append(f)
+ *
+ * @param {APMLockfile} lockfile
+ * @returns {{ uniqueFiles: string[], depFileMap: Record<string, string[]> }}
+ */
+function collectDeployedFiles(lockfile) {
+ /** @type {Set<string>} */
+ const seen = new Set();
+ /** @type {string[]} */
+ const uniqueFiles = [];
+ /** @type {Record<string, string[]>} */
+ const depFileMap = {};
+
+ for (const dep of lockfile.dependencies) {
+ const depKey = dep.is_virtual && dep.virtual_path ? `${dep.repo_url}/${dep.virtual_path}` : dep.source === "local" && dep.local_path ? dep.local_path : dep.repo_url;
+
+ /** @type {string[]} */
+ const depFiles = [];
+ for (const f of dep.deployed_files) {
+ depFiles.push(f);
+ if (!seen.has(f)) {
+ seen.add(f);
+ uniqueFiles.push(f);
+ }
+ }
+ if (depFiles.length > 0) {
+ depFileMap[depKey] = depFiles;
+ }
+ }
+
+ return { uniqueFiles, depFileMap };
+}
+
+/**
+ * Verify that every file listed in deployed_files actually exists in the bundle.
+ *
+ * @param {string} sourceDir - Extracted bundle directory.
+ * @param {string[]} uniqueFiles - Deduplicated list of relative file paths.
+ * @throws {Error} If any listed file is missing from the bundle.
+ */
+function verifyBundleContents(sourceDir, uniqueFiles) {
+ const missing = uniqueFiles.filter(f => {
+ const candidate = path.join(sourceDir, f);
+ return !fs.existsSync(candidate);
+ });
+
+ if (missing.length > 0) {
+ throw new Error(`Bundle verification failed – the following deployed files are missing from the bundle:\n` + missing.map(m => ` - ${m}`).join("\n"));
+ }
+ core.info(`[APM Unpack] Bundle verification passed (${uniqueFiles.length} file(s) verified)`);
+}
+
+// ---------------------------------------------------------------------------
+// Security helpers
+// ---------------------------------------------------------------------------
+
+/**
+ * Validate that a relative path from the lockfile is safe to deploy.
+ * Rejects absolute paths and path-traversal attempts (mirrors unpacker.py).
+ *
+ * @param {string} relPath - Relative path string from deployed_files.
+ * @throws {Error} If the path is unsafe.
+ */
+function assertSafePath(relPath) {
+ if (path.isAbsolute(relPath) || relPath.startsWith("/")) {
+ throw new Error(`Refusing to unpack unsafe absolute path from bundle lockfile: ${JSON.stringify(relPath)}`);
+ }
+ const parts = relPath.split(/[\\/]/);
+ if (parts.includes("..")) {
+ throw new Error(`Refusing to unpack path-traversal entry from bundle lockfile: ${JSON.stringify(relPath)}`);
+ }
+}
+
+/**
+ * Verify that the resolved destination path stays within outputDirResolved.
+ *
+ * @param {string} destPath - Absolute destination path.
+ * @param {string} outputDirResolved - Resolved absolute output directory.
+ * @throws {Error} If the dest escapes the output directory.
+ */
+function assertDestInsideOutput(destPath, outputDirResolved) {
+ const resolved = path.resolve(destPath);
+ if (!resolved.startsWith(outputDirResolved + path.sep) && resolved !== outputDirResolved) {
+ throw new Error(`Refusing to unpack path that escapes the output directory: ${JSON.stringify(destPath)}`);
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Copy helpers
+// ---------------------------------------------------------------------------
+
+/**
+ * Recursively copy a directory tree from src to dest, skipping symbolic links.
+ * Parent directories are created automatically.
+ *
+ * @param {string} src - Source directory.
+ * @param {string} dest - Destination directory.
+ * @returns {number} Number of files copied.
+ */
+function copyDirRecursive(src, dest) {
+ let count = 0;
+ const entries = fs.readdirSync(src, { withFileTypes: true });
+ for (const entry of entries) {
+ const srcPath = path.join(src, entry.name);
+ const destPath = path.join(dest, entry.name);
+ if (entry.isSymbolicLink()) {
+ // Security: skip symlinks (mirrors unpacker.py's ignore_symlinks)
+ core.warning(`[APM Unpack] Skipping symlink: ${srcPath}`);
+ continue;
+ }
+ if (entry.isDirectory()) {
+ fs.mkdirSync(destPath, { recursive: true });
+ count += copyDirRecursive(srcPath, destPath);
+ } else if (entry.isFile()) {
+ fs.mkdirSync(path.dirname(destPath), { recursive: true });
+ fs.copyFileSync(srcPath, destPath);
+ count++;
+ }
+ }
+ return count;
+}
+
+// ---------------------------------------------------------------------------
+// Main unpack function
+// ---------------------------------------------------------------------------
+
+/**
+ * @typedef {Object} UnpackResult
+ * @property {string} bundlePath - Path to the original bundle archive.
+ * @property {string[]} files - Unique list of deployed file paths.
+ * @property {boolean} verified - Whether bundle completeness was verified.
+ * @property {Record<string, string[]>} dependencyFiles - Files per dependency key.
+ * @property {number} skippedCount - Files skipped (symlinks, missing).
+ * @property {Record<string, string | number | boolean | null>} packMeta - Pack metadata from lockfile.
+ */
+
+/**
+ * Extract and apply an APM bundle to an output directory.
+ *
+ * This is the core implementation that mirrors the Python unpack_bundle()
+ * function in unpacker.py. All extraction and copying is done with the same
+ * additive-only, symlink-skipping, path-traversal-checking semantics.
+ *
+ * @param {object} params
+ * @param {string} params.bundleDir - Directory containing the *.tar.gz bundle.
+ * @param {string} params.outputDir - Target directory to copy files into.
+ * @param {boolean} [params.skipVerify] - Skip completeness verification.
+ * @param {boolean} [params.dryRun] - Resolve file list but write nothing.
+ * @returns {Promise<UnpackResult>}
+ */
+async function unpackBundle({ bundleDir, outputDir, skipVerify = false, dryRun = false }) {
+ core.info("=== APM Bundle Unpack ===");
+ core.info(`[APM Unpack] Bundle directory : ${bundleDir}`);
+ core.info(`[APM Unpack] Output directory : ${outputDir}`);
+ core.info(`[APM Unpack] Skip verify : ${skipVerify}`);
+ core.info(`[APM Unpack] Dry run : ${dryRun}`);
+
+ // 1. Find the archive
+ const bundlePath = findBundleFile(bundleDir);
+
+ // 2. Extract to temporary directory
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "apm-unpack-"));
+ core.info(`[APM Unpack] Temp directory : ${tempDir}`);
+
+ let sourceDir;
+ try {
+ core.info(`[APM Unpack] Extracting archive: ${bundlePath}`);
+ await exec.exec("tar", ["-xzf", bundlePath, "-C", tempDir]);
+ core.info(`[APM Unpack] Extraction complete`);
+
+ // 3. Find the inner bundle directory
+ sourceDir = findSourceDir(tempDir);
+
+ // List bundle contents for debugging
+ const allBundleFiles = listDirRecursive(sourceDir);
+ core.info(`[APM Unpack] Bundle contains ${allBundleFiles.length} file(s):`);
+ allBundleFiles.slice(0, 50).forEach(f => core.info(` ${f}`));
+ if (allBundleFiles.length > 50) {
+ core.info(` ... and ${allBundleFiles.length - 50} more`);
+ }
+
+ // 4. Read lockfile
+ const lockfilePath = findLockfile(sourceDir);
+ const lockfileContent = fs.readFileSync(lockfilePath, "utf-8");
+ core.info(`[APM Unpack] Lockfile size: ${lockfileContent.length} bytes`);
+
+ // 5. Parse lockfile
+ const lockfile = parseAPMLockfile(lockfileContent);
+ core.info(`[APM Unpack] Lockfile version : ${lockfile.lockfile_version}`);
+ core.info(`[APM Unpack] APM version : ${lockfile.apm_version}`);
+ core.info(`[APM Unpack] Dependencies : ${lockfile.dependencies.length}`);
+
+ if (lockfile.pack && Object.keys(lockfile.pack).length > 0) {
+ core.info(`[APM Unpack] Pack metadata : ${JSON.stringify(lockfile.pack)}`);
+ }
+
+ for (const dep of lockfile.dependencies) {
+ core.info(`[APM Unpack] dep: ${dep.repo_url}` + (dep.resolved_ref ? `@${dep.resolved_ref}` : "") + (dep.resolved_commit ? ` (${dep.resolved_commit.slice(0, 8)})` : "") + ` – ${dep.deployed_files.length} file(s)`);
+ dep.deployed_files.forEach(f => core.info(` → ${f}`));
+ }
+
+ // 6. Collect deployed files (deduplicated)
+ const { uniqueFiles, depFileMap } = collectDeployedFiles(lockfile);
+ core.info(`[APM Unpack] Total deployed files (deduplicated): ${uniqueFiles.length}`);
+
+ // 7. Verify bundle completeness
+ if (!skipVerify) {
+ verifyBundleContents(sourceDir, uniqueFiles);
+ } else {
+ core.info("[APM Unpack] Skipping bundle verification (skipVerify=true)");
+ }
+
+ const verified = !skipVerify;
+
+ // 8. Dry-run early exit
+ if (dryRun) {
+ core.info("[APM Unpack] Dry-run mode: resolved file list without writing");
+ return {
+ bundlePath,
+ files: uniqueFiles,
+ verified,
+ dependencyFiles: depFileMap,
+ skippedCount: 0,
+ packMeta: lockfile.pack,
+ };
+ }
+
+ // 9. Copy files to output directory (additive only, never deletes)
+ const outputDirResolved = path.resolve(outputDir);
+ fs.mkdirSync(outputDirResolved, { recursive: true });
+
+ let skipped = 0;
+ let copied = 0;
+
+ for (const relPath of uniqueFiles) {
+ // Guard: reject unsafe paths from the lockfile
+ assertSafePath(relPath);
+
+ const dest = path.join(outputDirResolved, relPath);
+ assertDestInsideOutput(dest, outputDirResolved);
+
+ // Strip trailing slash for path operations (directories end with /)
+ const relPathClean = relPath.endsWith("/") ? relPath.slice(0, -1) : relPath;
+ const src = path.join(sourceDir, relPathClean);
+
+ if (!fs.existsSync(src)) {
+ core.warning(`[APM Unpack] Skipping missing entry: ${relPath}`);
+ skipped++;
+ continue;
+ }
+
+ // Security: skip symlinks
+ const srcLstat = fs.lstatSync(src);
+ if (srcLstat.isSymbolicLink()) {
+ core.warning(`[APM Unpack] Skipping symlink: ${relPath}`);
+ skipped++;
+ continue;
+ }
+
+ if (srcLstat.isDirectory() || relPath.endsWith("/")) {
+ core.info(`[APM Unpack] Copying directory: ${relPath}`);
+ const destDir = path.join(outputDirResolved, relPathClean);
+ fs.mkdirSync(destDir, { recursive: true });
+ const n = copyDirRecursive(src, destDir);
+ core.info(`[APM Unpack] → Copied ${n} file(s) from ${relPath}`);
+ copied += n;
+ } else {
+ core.info(`[APM Unpack] Copying file: ${relPath}`);
+ fs.mkdirSync(path.dirname(dest), { recursive: true });
+ fs.copyFileSync(src, dest);
+ copied++;
+ }
+ }
+
+ core.info(`[APM Unpack] Done: ${copied} file(s) copied, ${skipped} skipped`);
+ core.info(`[APM Unpack] Deployed to: ${outputDirResolved}`);
+
+ // Log what was deployed for easy verification
+ const deployedFiles = listDirRecursive(outputDirResolved);
+ core.info(`[APM Unpack] Output directory now contains ${deployedFiles.length} file(s) (top-level snapshot):`);
+ deployedFiles.slice(0, 30).forEach(f => core.info(` ${f}`));
+
+ return {
+ bundlePath,
+ files: uniqueFiles,
+ verified,
+ dependencyFiles: depFileMap,
+ skippedCount: skipped,
+ packMeta: lockfile.pack,
+ };
+ } finally {
+ // Always clean up temp directory
+ try {
+ fs.rmSync(tempDir, { recursive: true, force: true });
+ core.info(`[APM Unpack] Cleaned up temp directory: ${tempDir}`);
+ } catch (cleanupErr) {
+ core.warning(`[APM Unpack] Failed to clean up temp directory ${tempDir}: ${cleanupErr}`);
+ }
+ }
+}
+
+/**
+ * List all file paths recursively under dir, relative to dir.
+ * Symbolic links are skipped.
+ *
+ * @param {string} dir
+ * @returns {string[]}
+ */
+function listDirRecursive(dir) {
+ /** @type {string[]} */
+ const result = [];
+ try {
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
+ for (const entry of entries) {
+ if (entry.isSymbolicLink()) continue;
+ const rel = entry.name;
+ if (entry.isDirectory()) {
+ const sub = listDirRecursive(path.join(dir, entry.name));
+ result.push(...sub.map(s => rel + "/" + s));
+ } else {
+ result.push(rel);
+ }
+ }
+ } catch {
+ // Best-effort listing
+ }
+ return result;
+}
+
+// ---------------------------------------------------------------------------
+// Entry point
+// ---------------------------------------------------------------------------
+
+/**
+ * Main entry point called by the github-script step.
+ *
+ * Reads configuration from environment variables:
+ * APM_BUNDLE_DIR – directory with the bundle tar.gz (default: /tmp/gh-aw/apm-bundle)
+ * OUTPUT_DIR – destination for deployed files (default: GITHUB_WORKSPACE)
+ */
+async function main() {
+ const bundleDir = process.env.APM_BUNDLE_DIR || "/tmp/gh-aw/apm-bundle";
+ const outputDir = process.env.OUTPUT_DIR || process.env.GITHUB_WORKSPACE || process.cwd();
+
+ core.info("[APM Unpack] Starting APM bundle unpacking");
+ core.info(`[APM Unpack] APM_BUNDLE_DIR : ${bundleDir}`);
+ core.info(`[APM Unpack] OUTPUT_DIR : ${outputDir}`);
+
+ try {
+ const result = await unpackBundle({ bundleDir, outputDir });
+
+ core.info("[APM Unpack] ✅ APM bundle unpacked successfully");
+ core.info(`[APM Unpack] Files deployed : ${result.files.length}`);
+ core.info(`[APM Unpack] Files skipped : ${result.skippedCount}`);
+ core.info(`[APM Unpack] Verified : ${result.verified}`);
+ } catch (err) {
+ const msg = err instanceof Error ? err.message : String(err);
+ core.error(`[APM Unpack] ❌ Failed to unpack APM bundle: ${msg}`);
+ throw err;
+ }
+}
+
+module.exports = {
+ main,
+ unpackBundle,
+ parseAPMLockfile,
+ unquoteYaml,
+ collectDeployedFiles,
+ findBundleFile,
+ findSourceDir,
+ findLockfile,
+ verifyBundleContents,
+ assertSafePath,
+ assertDestInsideOutput,
+ copyDirRecursive,
+ listDirRecursive,
+};
diff --git a/actions/setup/js/apm_unpack.test.cjs b/actions/setup/js/apm_unpack.test.cjs
new file mode 100644
index 00000000000..7ec966b8ac1
--- /dev/null
+++ b/actions/setup/js/apm_unpack.test.cjs
@@ -0,0 +1,980 @@
+// @ts-check
+// Unit tests for apm_unpack.cjs (run with vitest).
+
+import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
+const fs = require("fs");
+const path = require("path");
+const os = require("os");
+
+// ---------------------------------------------------------------------------
+// Global mock setup
+// ---------------------------------------------------------------------------
+
+const mockCore = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ setFailed: vi.fn(),
+ setOutput: vi.fn(),
+};
+
+const mockExec = {
+ exec: vi.fn(),
+};
+
+// Establish globals before requiring the module
+global.core = mockCore;
+global.exec = mockExec;
+
+const {
+ parseAPMLockfile,
+ unquoteYaml,
+ collectDeployedFiles,
+ findBundleFile,
+ findSourceDir,
+ findLockfile,
+ verifyBundleContents,
+ assertSafePath,
+ assertDestInsideOutput,
+ copyDirRecursive,
+ listDirRecursive,
+ unpackBundle,
+} = require("./apm_unpack.cjs");
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/** Create a temp directory and return its path. */
+function makeTempDir() {
+ return fs.mkdtempSync(path.join(os.tmpdir(), "apm-unpack-test-"));
+}
+
+/** Remove a directory recursively (best-effort). */
+function removeTempDir(dir) {
+ if (dir && fs.existsSync(dir)) {
+ fs.rmSync(dir, { recursive: true, force: true });
+ }
+}
+
+/** Write a file, creating parent directories as needed. */
+function writeFile(dir, relPath, content = "content") {
+ const full = path.join(dir, relPath);
+ fs.mkdirSync(path.dirname(full), { recursive: true });
+ fs.writeFileSync(full, content, "utf-8");
+ return full;
+}
+
+/**
+ * Minimal valid apm.lock.yaml content for a single dependency, mirroring the
+ * layout the APM packer emits (top-level metadata, one dependency entry, a
+ * deployed_files sequence).
+ * @param {object} [overrides]
+ * @param {string} [overrides.repoUrl] - dependency repository URL
+ * @param {string[]} [overrides.files] - deployed_files entries; a trailing "/" marks a directory
+ * @returns {string} complete lockfile YAML text
+ */
+function minimalLockfile({ repoUrl = "https://github.com/owner/repo", files = [".github/skills/foo/"] } = {}) {
+  // Each deployed file becomes one YAML sequence item under deployed_files.
+  const fileLines = files.map(f => `  - ${f}`).join("\n");
+  return `lockfile_version: '1'
+generated_at: '2024-01-15T10:00:00.000000+00:00'
+apm_version: 0.8.5
+dependencies:
+- repo_url: ${repoUrl}
+  host: github.com
+  resolved_commit: abc123def456789
+  resolved_ref: main
+  version: '1.0.0'
+  depth: 1
+  package_type: generic
+  deployed_files:
+${fileLines}
+`;
+}
+
+// ---------------------------------------------------------------------------
+// unquoteYaml
+// ---------------------------------------------------------------------------
+
+describe("unquoteYaml", () => {
+ it("returns null for empty/null/undefined/~ values", () => {
+ expect(unquoteYaml("")).toBeNull();
+ expect(unquoteYaml("~")).toBeNull();
+ expect(unquoteYaml("null")).toBeNull();
+ expect(unquoteYaml(null)).toBeNull();
+ expect(unquoteYaml(undefined)).toBeNull();
+ });
+
+ it("parses boolean literals", () => {
+ expect(unquoteYaml("true")).toBe(true);
+ expect(unquoteYaml("false")).toBe(false);
+ });
+
+ it("parses integer literals", () => {
+ expect(unquoteYaml("0")).toBe(0);
+ expect(unquoteYaml("1")).toBe(1);
+ expect(unquoteYaml("42")).toBe(42);
+ expect(unquoteYaml("-7")).toBe(-7);
+ });
+
+ it("parses float literals", () => {
+ expect(unquoteYaml("3.14")).toBeCloseTo(3.14);
+ expect(unquoteYaml("-1.5")).toBeCloseTo(-1.5);
+ });
+
+ it("strips single quotes", () => {
+ expect(unquoteYaml("'hello'")).toBe("hello");
+ expect(unquoteYaml("'1'")).toBe("1");
+ expect(unquoteYaml("'true'")).toBe("true");
+ });
+
+ it("strips double quotes", () => {
+ expect(unquoteYaml('"world"')).toBe("world");
+ expect(unquoteYaml('"2024-01-01"')).toBe("2024-01-01");
+ });
+
+ it("returns bare strings unchanged", () => {
+ expect(unquoteYaml("main")).toBe("main");
+ expect(unquoteYaml("github.com")).toBe("github.com");
+ expect(unquoteYaml("https://github.com/owner/repo")).toBe("https://github.com/owner/repo");
+ });
+
+ it("trims surrounding whitespace before processing", () => {
+ expect(unquoteYaml(" 'hello' ")).toBe("hello");
+ expect(unquoteYaml(" 42 ")).toBe(42);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// parseAPMLockfile – basic structure
+// ---------------------------------------------------------------------------
+
+describe("parseAPMLockfile – top-level fields", () => {
+ it("parses lockfile_version, generated_at, apm_version", () => {
+ const yaml = `lockfile_version: '1'
+generated_at: '2024-01-15T10:00:00.000000+00:00'
+apm_version: 0.8.5
+dependencies:
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.lockfile_version).toBe("1");
+ expect(result.generated_at).toBe("2024-01-15T10:00:00.000000+00:00");
+ expect(result.apm_version).toBe("0.8.5");
+ expect(result.dependencies).toHaveLength(0);
+ });
+
+ it("handles missing optional fields gracefully", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.lockfile_version).toBe("1");
+ expect(result.apm_version).toBeNull();
+ expect(result.dependencies).toHaveLength(0);
+ });
+
+ it("parses pack metadata block", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+pack:
+ target: claude
+ format: apm
+ generated_at: '2024-01-15T10:00:00.000000+00:00'
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.pack.target).toBe("claude");
+ expect(result.pack.format).toBe("apm");
+ });
+
+ it("returns empty result for empty/blank input", () => {
+ const result = parseAPMLockfile("");
+ expect(result.dependencies).toHaveLength(0);
+ expect(result.lockfile_version).toBeNull();
+ });
+
+ it("ignores YAML comment lines", () => {
+ const yaml = `# This is a comment
+lockfile_version: '1'
+# Another comment
+dependencies:
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.lockfile_version).toBe("1");
+ });
+});
+
+// ---------------------------------------------------------------------------
+// parseAPMLockfile – dependency items
+// ---------------------------------------------------------------------------
+
+describe("parseAPMLockfile – dependencies", () => {
+ it("parses a single dependency with deployed_files", () => {
+ const yaml = minimalLockfile({
+ repoUrl: "https://github.com/microsoft/apm-sample-package",
+ files: [".github/skills/my-skill/", ".claude/skills/my-skill/"],
+ });
+ const result = parseAPMLockfile(yaml);
+
+ expect(result.dependencies).toHaveLength(1);
+ const dep = result.dependencies[0];
+ expect(dep.repo_url).toBe("https://github.com/microsoft/apm-sample-package");
+ expect(dep.host).toBe("github.com");
+ expect(dep.resolved_commit).toBe("abc123def456789");
+ expect(dep.resolved_ref).toBe("main");
+ expect(dep.version).toBe("1.0.0");
+ expect(dep.depth).toBe(1);
+ expect(dep.package_type).toBe("generic");
+ expect(dep.deployed_files).toEqual([".github/skills/my-skill/", ".claude/skills/my-skill/"]);
+ });
+
+ it("parses multiple dependencies", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/pkg-a
+ host: github.com
+ resolved_commit: aaaa
+ resolved_ref: main
+ depth: 1
+ deployed_files:
+ - .github/skills/pkg-a/
+- repo_url: https://github.com/owner/pkg-b
+ host: github.com
+ resolved_commit: bbbb
+ resolved_ref: v2
+ depth: 1
+ deployed_files:
+ - .github/skills/pkg-b/
+ - .claude/skills/pkg-b/
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.dependencies).toHaveLength(2);
+ expect(result.dependencies[0].repo_url).toBe("https://github.com/owner/pkg-a");
+ expect(result.dependencies[0].deployed_files).toEqual([".github/skills/pkg-a/"]);
+ expect(result.dependencies[1].repo_url).toBe("https://github.com/owner/pkg-b");
+ expect(result.dependencies[1].deployed_files).toEqual([".github/skills/pkg-b/", ".claude/skills/pkg-b/"]);
+ });
+
+ it("handles dependency with no deployed_files", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/empty-pkg
+ host: github.com
+ depth: 1
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.dependencies).toHaveLength(1);
+ expect(result.dependencies[0].deployed_files).toEqual([]);
+ });
+
+ it("parses boolean fields: is_virtual, is_dev", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/repo
+ is_virtual: true
+ is_dev: true
+ depth: 1
+`;
+ const result = parseAPMLockfile(yaml);
+ const dep = result.dependencies[0];
+ expect(dep.is_virtual).toBe(true);
+ expect(dep.is_dev).toBe(true);
+ });
+
+ it("parses virtual package with virtual_path", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/mono
+ virtual_path: packages/sub
+ is_virtual: true
+ depth: 2
+ deployed_files:
+ - .github/skills/sub/
+`;
+ const result = parseAPMLockfile(yaml);
+ const dep = result.dependencies[0];
+ expect(dep.virtual_path).toBe("packages/sub");
+ expect(dep.is_virtual).toBe(true);
+ expect(dep.depth).toBe(2);
+ });
+
+ it("parses local dependency with source and local_path", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: local
+ source: local
+ local_path: ./my-local-pkg
+ depth: 1
+ deployed_files:
+ - .github/skills/local/
+`;
+ const result = parseAPMLockfile(yaml);
+ const dep = result.dependencies[0];
+ expect(dep.source).toBe("local");
+ expect(dep.local_path).toBe("./my-local-pkg");
+ });
+
+ it("handles deployed_files with plain file paths (no trailing slash)", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/repo
+ deployed_files:
+ - .github/copilot-instructions.md
+ - README.md
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.dependencies[0].deployed_files).toEqual([".github/copilot-instructions.md", "README.md"]);
+ });
+
+ it("handles multiple fields appearing after deployed_files", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/repo
+ host: github.com
+ resolved_commit: abc123
+ depth: 1
+ deployed_files:
+ - .github/skills/foo/
+ resolved_ref: main
+ package_type: generic
+`;
+ const result = parseAPMLockfile(yaml);
+ const dep = result.dependencies[0];
+ // After deployed_files block, parser should resume dep_item and pick up remaining keys
+ expect(dep.deployed_files).toEqual([".github/skills/foo/"]);
+ expect(dep.resolved_ref).toBe("main");
+ expect(dep.package_type).toBe("generic");
+ });
+});
+
+// ---------------------------------------------------------------------------
+// collectDeployedFiles
+// ---------------------------------------------------------------------------
+
+describe("collectDeployedFiles", () => {
+ it("deduplicates files across dependencies", () => {
+ const lockfile = {
+ lockfile_version: "1",
+ generated_at: null,
+ apm_version: null,
+ pack: {},
+ dependencies: [
+ { ...makeEmptyDep(), repo_url: "a", deployed_files: ["file1.txt", "file2.txt"] },
+ { ...makeEmptyDep(), repo_url: "b", deployed_files: ["file2.txt", "file3.txt"] },
+ ],
+ };
+ const { uniqueFiles, depFileMap } = collectDeployedFiles(lockfile);
+ expect(uniqueFiles).toEqual(["file1.txt", "file2.txt", "file3.txt"]);
+ expect(depFileMap["a"]).toEqual(["file1.txt", "file2.txt"]);
+ expect(depFileMap["b"]).toEqual(["file2.txt", "file3.txt"]);
+ });
+
+ it("preserves insertion order (mirrors Python seen set logic)", () => {
+ const lockfile = {
+ lockfile_version: "1",
+ generated_at: null,
+ apm_version: null,
+ pack: {},
+ dependencies: [
+ { ...makeEmptyDep(), repo_url: "a", deployed_files: ["z.txt", "a.txt"] },
+ { ...makeEmptyDep(), repo_url: "b", deployed_files: ["m.txt"] },
+ ],
+ };
+ const { uniqueFiles } = collectDeployedFiles(lockfile);
+ expect(uniqueFiles).toEqual(["z.txt", "a.txt", "m.txt"]);
+ });
+
+ it("uses virtual_path in dep key for virtual packages", () => {
+ const lockfile = {
+ lockfile_version: "1",
+ generated_at: null,
+ apm_version: null,
+ pack: {},
+ dependencies: [
+ {
+ ...makeEmptyDep(),
+ repo_url: "https://github.com/owner/mono",
+ is_virtual: true,
+ virtual_path: "packages/sub",
+ deployed_files: ["skill/"],
+ },
+ ],
+ };
+ const { depFileMap } = collectDeployedFiles(lockfile);
+ expect(depFileMap["https://github.com/owner/mono/packages/sub"]).toEqual(["skill/"]);
+ });
+
+ it("uses local_path as key for local packages", () => {
+ const lockfile = {
+ lockfile_version: "1",
+ generated_at: null,
+ apm_version: null,
+ pack: {},
+ dependencies: [
+ {
+ ...makeEmptyDep(),
+ repo_url: "local",
+ source: "local",
+ local_path: "./my-pkg",
+ deployed_files: ["skill/"],
+ },
+ ],
+ };
+ const { depFileMap } = collectDeployedFiles(lockfile);
+ expect(depFileMap["./my-pkg"]).toEqual(["skill/"]);
+ });
+
+ it("omits empty deployed_files from depFileMap", () => {
+ const lockfile = {
+ lockfile_version: "1",
+ generated_at: null,
+ apm_version: null,
+ pack: {},
+ dependencies: [{ ...makeEmptyDep(), repo_url: "a", deployed_files: [] }],
+ };
+ const { uniqueFiles, depFileMap } = collectDeployedFiles(lockfile);
+ expect(uniqueFiles).toHaveLength(0);
+ expect(Object.keys(depFileMap)).toHaveLength(0);
+ });
+});
+
+// Helper used in collectDeployedFiles tests
+function makeEmptyDep() {
+ return {
+ repo_url: "",
+ host: null,
+ resolved_commit: null,
+ resolved_ref: null,
+ version: null,
+ virtual_path: null,
+ is_virtual: false,
+ depth: 1,
+ resolved_by: null,
+ package_type: null,
+ deployed_files: [],
+ source: null,
+ local_path: null,
+ content_hash: null,
+ is_dev: false,
+ };
+}
+
+// ---------------------------------------------------------------------------
+// findBundleFile
+// ---------------------------------------------------------------------------
+
+describe("findBundleFile", () => {
+ let tempDir;
+
+ beforeEach(() => {
+ tempDir = makeTempDir();
+ vi.clearAllMocks();
+ global.core = mockCore;
+ });
+ afterEach(() => removeTempDir(tempDir));
+
+ it("finds a single tar.gz file in the bundle directory", () => {
+ writeFile(tempDir, "my-package-1.0.0.tar.gz", "fake-archive");
+ const result = findBundleFile(tempDir);
+ expect(result).toBe(path.join(tempDir, "my-package-1.0.0.tar.gz"));
+ });
+
+ it("throws when directory does not exist", () => {
+ expect(() => findBundleFile("/nonexistent/path/xyz")).toThrow(/not found/);
+ });
+
+ it("throws when no tar.gz file exists", () => {
+ writeFile(tempDir, "readme.txt", "not a bundle");
+ expect(() => findBundleFile(tempDir)).toThrow(/No \*.tar\.gz bundle found/);
+ });
+
+ it("uses first file and warns when multiple bundles are present", () => {
+ writeFile(tempDir, "pkg-1.0.0.tar.gz", "archive-1");
+ writeFile(tempDir, "pkg-2.0.0.tar.gz", "archive-2");
+ const result = findBundleFile(tempDir);
+ expect(result).toMatch(/\.tar\.gz$/);
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Multiple bundles found"));
+ });
+});
+
+// ---------------------------------------------------------------------------
+// findSourceDir
+// ---------------------------------------------------------------------------
+
+describe("findSourceDir", () => {
+ let tempDir;
+
+ beforeEach(() => {
+ tempDir = makeTempDir();
+ vi.clearAllMocks();
+ global.core = mockCore;
+ });
+ afterEach(() => removeTempDir(tempDir));
+
+ it("returns the single subdirectory when the archive has one top-level dir", () => {
+ const inner = path.join(tempDir, "my-package-1.0.0");
+ fs.mkdirSync(inner);
+ const result = findSourceDir(tempDir);
+ expect(result).toBe(inner);
+ });
+
+ it("returns the extraction root when multiple entries exist", () => {
+ fs.mkdirSync(path.join(tempDir, "dir-a"));
+ fs.mkdirSync(path.join(tempDir, "dir-b"));
+ const result = findSourceDir(tempDir);
+ expect(result).toBe(tempDir);
+ });
+
+ it("returns the extraction root when only files exist (no subdirectory)", () => {
+ writeFile(tempDir, "apm.lock.yaml", "lockfile");
+ const result = findSourceDir(tempDir);
+ expect(result).toBe(tempDir);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// findLockfile
+// ---------------------------------------------------------------------------
+
+describe("findLockfile", () => {
+ let tempDir;
+
+ beforeEach(() => {
+ tempDir = makeTempDir();
+ vi.clearAllMocks();
+ global.core = mockCore;
+ });
+ afterEach(() => removeTempDir(tempDir));
+
+ it("finds apm.lock.yaml", () => {
+ writeFile(tempDir, "apm.lock.yaml", "content");
+ const result = findLockfile(tempDir);
+ expect(result).toBe(path.join(tempDir, "apm.lock.yaml"));
+ expect(mockCore.warning).not.toHaveBeenCalled();
+ });
+
+ it("throws when apm.lock.yaml does not exist", () => {
+ expect(() => findLockfile(tempDir)).toThrow(/apm\.lock\.yaml not found/);
+ });
+
+ it("throws when only legacy apm.lock exists (not supported)", () => {
+ writeFile(tempDir, "apm.lock", "content");
+ expect(() => findLockfile(tempDir)).toThrow(/apm\.lock\.yaml not found/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// verifyBundleContents
+// ---------------------------------------------------------------------------
+
+describe("verifyBundleContents", () => {
+ let tempDir;
+
+ beforeEach(() => {
+ tempDir = makeTempDir();
+ vi.clearAllMocks();
+ global.core = mockCore;
+ });
+ afterEach(() => removeTempDir(tempDir));
+
+ it("passes when all files exist in the bundle", () => {
+ writeFile(tempDir, ".github/skills/foo/skill.md");
+ writeFile(tempDir, ".claude/skills/foo/skill.md");
+ expect(() => verifyBundleContents(tempDir, [".github/skills/foo/skill.md", ".claude/skills/foo/skill.md"])).not.toThrow();
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("verification passed"));
+ });
+
+ it("throws when a file listed in deployed_files is missing", () => {
+ writeFile(tempDir, ".github/skills/foo/skill.md");
+ // .claude/skills/foo/skill.md is missing
+ expect(() => verifyBundleContents(tempDir, [".github/skills/foo/skill.md", ".claude/skills/foo/skill.md"])).toThrow(/Bundle verification failed/);
+ });
+
+ it("passes for directory entries (path ending with /)", () => {
+ // A directory itself counts as existing
+ fs.mkdirSync(path.join(tempDir, ".github", "skills", "foo"), { recursive: true });
+ expect(() => verifyBundleContents(tempDir, [".github/skills/foo/"])).not.toThrow();
+ });
+
+ it("passes for empty deployed_files list", () => {
+ expect(() => verifyBundleContents(tempDir, [])).not.toThrow();
+ });
+});
+
+// ---------------------------------------------------------------------------
+// assertSafePath
+// ---------------------------------------------------------------------------
+
+// Guards against malicious deployed_files entries in the lockfile: absolute
+// paths and ".."-traversal must be rejected before any file copy happens.
+describe("assertSafePath", () => {
+  it("accepts valid relative paths", () => {
+    expect(() => assertSafePath(".github/skills/foo/skill.md")).not.toThrow();
+    expect(() => assertSafePath("README.md")).not.toThrow();
+    expect(() => assertSafePath("some/nested/dir/file.txt")).not.toThrow();
+  });
+
+  it("rejects absolute paths", () => {
+    expect(() => assertSafePath("/etc/passwd")).toThrow(/absolute path/i);
+    expect(() => assertSafePath("/tmp/attack")).toThrow(/absolute path/i);
+  });
+
+  it("rejects path traversal with ..", () => {
+    // Both a leading ".." and one buried mid-path must fail.
+    expect(() => assertSafePath("../outside")).toThrow(/path-traversal/i);
+    expect(() => assertSafePath("safe/../../../etc/passwd")).toThrow(/path-traversal/i);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// assertDestInsideOutput
+// ---------------------------------------------------------------------------
+
+// Second containment check: even a "safe" relative path must resolve to a
+// destination inside the output directory.
+describe("assertDestInsideOutput", () => {
+  it("accepts paths inside the output directory", () => {
+    // NOTE(review): "/tmp/test-output" assumes POSIX path resolution; on a
+    // Windows runner path.resolve would prepend a drive letter — confirm these
+    // tests only run on POSIX CI before adding Windows to the matrix.
+    const output = path.resolve("/tmp/test-output");
+    expect(() => assertDestInsideOutput(output + "/subdir/file.txt", output)).not.toThrow();
+    expect(() => assertDestInsideOutput(output + "/nested/deep/file.txt", output)).not.toThrow();
+  });
+
+  it("rejects paths that escape the output directory", () => {
+    const output = path.resolve("/tmp/test-output");
+    expect(() => assertDestInsideOutput("/tmp/other/file.txt", output)).toThrow(/escapes/i);
+    expect(() => assertDestInsideOutput("/etc/passwd", output)).toThrow(/escapes/i);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// copyDirRecursive
+// ---------------------------------------------------------------------------
+
+describe("copyDirRecursive", () => {
+ let srcDir;
+ let destDir;
+
+ beforeEach(() => {
+ srcDir = makeTempDir();
+ destDir = makeTempDir();
+ vi.clearAllMocks();
+ global.core = mockCore;
+ });
+ afterEach(() => {
+ removeTempDir(srcDir);
+ removeTempDir(destDir);
+ });
+
+ it("copies all files from source to destination", () => {
+ writeFile(srcDir, "file1.txt", "a");
+ writeFile(srcDir, "subdir/file2.txt", "b");
+ writeFile(srcDir, "subdir/nested/file3.txt", "c");
+
+ const count = copyDirRecursive(srcDir, destDir);
+ expect(count).toBe(3);
+ expect(fs.existsSync(path.join(destDir, "file1.txt"))).toBe(true);
+ expect(fs.existsSync(path.join(destDir, "subdir", "file2.txt"))).toBe(true);
+ expect(fs.existsSync(path.join(destDir, "subdir", "nested", "file3.txt"))).toBe(true);
+ });
+
+ it("preserves file content", () => {
+ writeFile(srcDir, "hello.txt", "Hello, World!");
+ copyDirRecursive(srcDir, destDir);
+ const content = fs.readFileSync(path.join(destDir, "hello.txt"), "utf-8");
+ expect(content).toBe("Hello, World!");
+ });
+
+ it("skips symbolic links with a warning", () => {
+ writeFile(srcDir, "real.txt", "real");
+ // Create a symlink (may not work on all platforms but is tested here)
+ try {
+ fs.symlinkSync(path.join(srcDir, "real.txt"), path.join(srcDir, "link.txt"));
+ copyDirRecursive(srcDir, destDir);
+ // The symlink should not be copied
+ expect(fs.existsSync(path.join(destDir, "link.txt"))).toBe(false);
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("symlink"));
+ } catch {
+ // Symlink creation may fail in some environments – skip
+ }
+ });
+
+ it("returns 0 for an empty source directory", () => {
+ const count = copyDirRecursive(srcDir, destDir);
+ expect(count).toBe(0);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// listDirRecursive
+// ---------------------------------------------------------------------------
+
+describe("listDirRecursive", () => {
+ let tempDir;
+
+ beforeEach(() => {
+ tempDir = makeTempDir();
+ });
+ afterEach(() => removeTempDir(tempDir));
+
+ it("lists all files recursively", () => {
+ writeFile(tempDir, "a.txt");
+ writeFile(tempDir, "sub/b.txt");
+ writeFile(tempDir, "sub/deep/c.txt");
+
+ const files = listDirRecursive(tempDir);
+ expect(files).toContain("a.txt");
+ expect(files).toContain("sub/b.txt");
+ expect(files).toContain("sub/deep/c.txt");
+ });
+
+ it("returns empty array for empty directory", () => {
+ expect(listDirRecursive(tempDir)).toHaveLength(0);
+ });
+
+ it("returns empty array for non-existent directory", () => {
+ expect(listDirRecursive("/nonexistent/xyz")).toHaveLength(0);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// Full unpackBundle integration test (using real filesystem)
+// ---------------------------------------------------------------------------
+
+describe("unpackBundle – integration", () => {
+ let bundleBaseDir;
+ let outputDir;
+
+ beforeEach(() => {
+ bundleBaseDir = makeTempDir();
+ outputDir = makeTempDir();
+ vi.clearAllMocks();
+ global.core = mockCore;
+ global.exec = mockExec;
+ });
+ afterEach(() => {
+ removeTempDir(bundleBaseDir);
+ removeTempDir(outputDir);
+ });
+
+ /**
+ * Build a fake extracted bundle directory inside bundleBaseDir:
+ * bundleBaseDir/
+ * fake-archive.tar.gz (empty placeholder – exec mock skips real extraction)
+ * extracted/
+ * pkg-1.0.0/
+ * apm.lock.yaml
+ * .github/skills/my-skill/prompt.md
+ * .claude/skills/my-skill/CLAUDE.md
+ *
+ * The exec mock simulates tar extraction by creating the same structure in the
+ * tempDir that unpackBundle uses.
+ */
+ function buildFakeBundle({
+ repoUrl = "https://github.com/owner/my-skill",
+ files = [
+ { path: ".github/skills/my-skill/prompt.md", content: "# My Skill" },
+ { path: ".claude/skills/my-skill/CLAUDE.md", content: "# Claude Skill" },
+ ],
+ deployedFiles = [".github/skills/my-skill/", ".claude/skills/my-skill/"],
+ } = {}) {
+ // Write the placeholder tar.gz so findBundleFile succeeds
+ fs.writeFileSync(path.join(bundleBaseDir, "my-package-1.0.0.tar.gz"), "fake");
+
+ // Build the lockfile content
+ const fileLines = deployedFiles.map(f => ` - ${f}`).join("\n");
+ const lockfileContent = `lockfile_version: '1'
+generated_at: '2024-01-15T10:00:00.000000+00:00'
+apm_version: 0.8.5
+dependencies:
+- repo_url: ${repoUrl}
+ host: github.com
+ resolved_commit: abc123def456
+ resolved_ref: main
+ depth: 1
+ package_type: generic
+ deployed_files:
+${fileLines}
+pack:
+ target: claude
+ format: apm
+`;
+
+ // The exec mock will be called with tar -xzf -C
+ // We intercept it to write our fake extracted structure into tempDir
+ mockExec.exec.mockImplementation(async (_cmd, args) => {
+ // args: ['-xzf', bundlePath, '-C', tempDir]
+ const tempDir = args[3];
+ const innerDir = path.join(tempDir, "my-package-1.0.0");
+ fs.mkdirSync(innerDir, { recursive: true });
+
+ // Write lockfile
+ fs.writeFileSync(path.join(innerDir, "apm.lock.yaml"), lockfileContent);
+
+ // Write deployed files
+ for (const f of files) {
+ writeFile(innerDir, f.path.replace(/\/$/, "") + (f.path.endsWith("/") ? "/placeholder" : ""), f.content);
+ }
+
+ // Write directory structure for directory entries in deployedFiles
+ for (const df of deployedFiles) {
+ if (df.endsWith("/")) {
+ const dirPath = df.replace(/\/$/, "");
+ fs.mkdirSync(path.join(innerDir, dirPath), { recursive: true });
+ // Write at least one file into each directory
+ const placeholder = path.join(innerDir, dirPath, "skill.md");
+ if (!fs.existsSync(placeholder)) {
+ fs.writeFileSync(placeholder, "# placeholder");
+ }
+ }
+ }
+ });
+ }
+
+ it("unpacks a bundle and deploys files to output directory", async () => {
+ buildFakeBundle();
+
+ const result = await unpackBundle({ bundleDir: bundleBaseDir, outputDir });
+
+ expect(result.files).toContain(".github/skills/my-skill/");
+ expect(result.files).toContain(".claude/skills/my-skill/");
+ expect(result.verified).toBe(true);
+ expect(result.packMeta.target).toBe("claude");
+
+ // Verify files were deployed
+ expect(fs.existsSync(path.join(outputDir, ".github", "skills", "my-skill"))).toBe(true);
+ expect(fs.existsSync(path.join(outputDir, ".claude", "skills", "my-skill"))).toBe(true);
+ });
+
+ it("dry-run resolves files without copying", async () => {
+ buildFakeBundle();
+
+ const result = await unpackBundle({ bundleDir: bundleBaseDir, outputDir, dryRun: true });
+
+ expect(result.files).toContain(".github/skills/my-skill/");
+ expect(result.files).toContain(".claude/skills/my-skill/");
+ // Nothing should have been deployed
+ expect(fs.existsSync(path.join(outputDir, ".github"))).toBe(false);
+ });
+
+ it("throws when bundle directory is empty", async () => {
+ await expect(unpackBundle({ bundleDir: bundleBaseDir, outputDir })).rejects.toThrow(/No \*.tar\.gz bundle found/);
+ });
+
+ it("throws when lockfile is missing from bundle", async () => {
+ fs.writeFileSync(path.join(bundleBaseDir, "broken.tar.gz"), "fake");
+
+ mockExec.exec.mockImplementation(async (_cmd, args) => {
+ const tempDir = args[3];
+ const innerDir = path.join(tempDir, "my-package-1.0.0");
+ fs.mkdirSync(innerDir, { recursive: true });
+ // No lockfile written – this should trigger an error
+ });
+
+ await expect(unpackBundle({ bundleDir: bundleBaseDir, outputDir })).rejects.toThrow(/apm\.lock\.yaml not found/);
+ });
+
+ it("handles plain file entries (non-directory deployed_files)", async () => {
+ buildFakeBundle({
+ deployedFiles: [".github/copilot-instructions.md"],
+ files: [{ path: ".github/copilot-instructions.md", content: "# Instructions" }],
+ });
+
+ mockExec.exec.mockImplementation(async (_cmd, args) => {
+ const tempDir = args[3];
+ const innerDir = path.join(tempDir, "my-package-1.0.0");
+ fs.mkdirSync(innerDir, { recursive: true });
+
+ const lockfileContent = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/repo
+ deployed_files:
+ - .github/copilot-instructions.md
+`;
+ fs.writeFileSync(path.join(innerDir, "apm.lock.yaml"), lockfileContent);
+ writeFile(innerDir, ".github/copilot-instructions.md", "# Instructions");
+ });
+
+ const result = await unpackBundle({ bundleDir: bundleBaseDir, outputDir });
+ expect(result.files).toContain(".github/copilot-instructions.md");
+ expect(fs.existsSync(path.join(outputDir, ".github", "copilot-instructions.md"))).toBe(true);
+ });
+
+ it("throws when bundle contains only legacy apm.lock (not supported)", async () => {
+ fs.writeFileSync(path.join(bundleBaseDir, "pkg.tar.gz"), "fake");
+
+ mockExec.exec.mockImplementation(async (_cmd, args) => {
+ const tempDir = args[3];
+ const innerDir = path.join(tempDir, "pkg-1.0.0");
+ fs.mkdirSync(innerDir, { recursive: true });
+ // Only write the legacy lockfile — should be rejected
+ fs.writeFileSync(path.join(innerDir, "apm.lock"), "lockfile_version: '1'\ndependencies:\n");
+ });
+
+ await expect(unpackBundle({ bundleDir: bundleBaseDir, outputDir })).rejects.toThrow(/apm\.lock\.yaml not found/);
+ });
+
+ it("skips verification when skipVerify is true", async () => {
+ buildFakeBundle({ deployedFiles: [".github/skills/foo/"] });
+
+ // Simulate a bundle where the file is missing but skipVerify lets it through
+ mockExec.exec.mockImplementation(async (_cmd, args) => {
+ const tempDir = args[3];
+ const innerDir = path.join(tempDir, "my-package-1.0.0");
+ fs.mkdirSync(innerDir, { recursive: true });
+
+ const lockfileContent = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/repo
+ deployed_files:
+ - .github/skills/missing-file/
+`;
+ fs.writeFileSync(path.join(innerDir, "apm.lock.yaml"), lockfileContent);
+ // Intentionally NOT creating .github/skills/missing-file/
+ });
+
+ const result = await unpackBundle({ bundleDir: bundleBaseDir, outputDir, skipVerify: true });
+ expect(result.verified).toBe(false);
+ expect(result.skippedCount).toBe(1); // missing entry is skipped
+ });
+});
+
+// ---------------------------------------------------------------------------
+// Edge cases for YAML parser
+// ---------------------------------------------------------------------------
+
+describe("parseAPMLockfile – edge cases", () => {
+ it("handles YAML with Windows-style line endings (CRLF)", () => {
+ const yaml = "lockfile_version: '1'\r\ngenerated_at: '2024-01-15'\r\ndependencies:\r\n";
+ const result = parseAPMLockfile(yaml);
+ // CRLF lines won't match our patterns cleanly, but should not throw
+ expect(result).toBeDefined();
+ });
+
+ it("handles quoted values with internal spaces", () => {
+ const yaml = `lockfile_version: '1 (patched)'
+dependencies:
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.lockfile_version).toBe("1 (patched)");
+ });
+
+ it("handles multiple dependencies with pack block at the end", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/a/pkg
+ deployed_files:
+ - skill-a/
+- repo_url: https://github.com/b/pkg
+ deployed_files:
+ - skill-b/
+pack:
+ target: all
+ format: apm
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.dependencies).toHaveLength(2);
+ expect(result.pack.target).toBe("all");
+ });
+
+ it("does not modify deployed_files paths (preserves trailing slash)", () => {
+ const yaml = `lockfile_version: '1'
+dependencies:
+- repo_url: https://github.com/owner/repo
+ deployed_files:
+ - .github/skills/my-skill/
+ - plain-file.md
+`;
+ const result = parseAPMLockfile(yaml);
+ expect(result.dependencies[0].deployed_files[0]).toBe(".github/skills/my-skill/");
+ expect(result.dependencies[0].deployed_files[1]).toBe("plain-file.md");
+ });
+});
diff --git a/actions/setup/js/run_apm_unpack.cjs b/actions/setup/js/run_apm_unpack.cjs
new file mode 100644
index 00000000000..ab2ac6e93b9
--- /dev/null
+++ b/actions/setup/js/run_apm_unpack.cjs
@@ -0,0 +1,65 @@
+// @ts-check
+/**
+ * run_apm_unpack.cjs
+ *
+ * Standalone entry-point for apm_unpack.cjs used in CI integration tests.
+ * Sets up lightweight CJS-compatible shims for the @actions/* globals expected
+ * by apm_unpack.cjs, then calls main().
+ *
+ * The @actions/core v3+ package is ESM-only and cannot be loaded via require().
+ * The shims below reproduce the subset of the API that apm_unpack.cjs uses:
+ * core.info / core.warning / core.error / core.setFailed / core.setOutput
+ * exec.exec(cmd, args, options)
+ *
+ * Environment variables (consumed by apm_unpack.main):
+ * APM_BUNDLE_DIR – directory containing the *.tar.gz bundle
+ * OUTPUT_DIR – destination directory for deployed files
+ *
+ * Usage:
+ * node actions/setup/js/run_apm_unpack.cjs
+ */
+
+"use strict";
+
+const { spawnSync } = require("child_process");
+const { setupGlobals } = require("./setup_globals.cjs");
+const { main } = require("./apm_unpack.cjs");
+
+// Minimal shim for @actions/core — only the methods used by apm_unpack.cjs.
+const core = {
+ info: msg => console.log(msg),
+ warning: msg => console.warn(`::warning::${msg}`),
+ error: msg => console.error(`::error::${msg}`),
+ setFailed: msg => {
+ console.error(`::error::${msg}`);
+ process.exitCode = 1;
+ },
+ setOutput: (name, value) => (process.env.GITHUB_OUTPUT ? require("fs").appendFileSync(process.env.GITHUB_OUTPUT, `${name}=${value}\n`) : console.log(`${name}=${value}`)),
+};
+
+// Minimal shim for @actions/exec — only exec() is used by apm_unpack.cjs.
+const exec = {
+ exec: async (cmd, args = [], opts = {}) => {
+ const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
+ if (result.error) throw result.error; // spawn itself failed (e.g. ENOENT)
+ if (result.status !== 0)
+ throw new Error(`Command failed: ${cmd} ${args.join(" ")} (exit ${result.status})`);
+ return result.status;
+ },
+};
+
+// Wire shims into globals so apm_unpack.cjs can use them.
+// Passing empty objects for github (GraphQL client) and context (event payload)
+// because apm_unpack does not use GitHub API or event metadata.
+setupGlobals(
+ core, // logging, outputs, inputs
+ {}, // @actions/github – not used by apm_unpack
+ {}, // GitHub Actions event context – not used by apm_unpack
+ exec, // runs `tar -xzf`
+ {} // @actions/io – not used by apm_unpack
+);
+
+main().catch(err => {
+ console.error(`::error::${err.message}`);
+ process.exit(1);
+});
diff --git a/actions/setup/js/tsconfig.json b/actions/setup/js/tsconfig.json
index 6acdfe964f1..0f9de57f780 100644
--- a/actions/setup/js/tsconfig.json
+++ b/actions/setup/js/tsconfig.json
@@ -32,5 +32,5 @@
"typeRoots": ["./node_modules/@types", "./types"]
},
"include": ["*.cjs", "types/*.d.ts"],
- "exclude": ["../../../node_modules", "../../../dist", "*.test.cjs"]
+ "exclude": ["../../../node_modules", "../../../dist", "*.test.cjs", "run_apm_unpack.cjs"]
}
diff --git a/pkg/workflow/apm_dependencies.go b/pkg/workflow/apm_dependencies.go
index 8a5682f1304..604e492ba0b 100644
--- a/pkg/workflow/apm_dependencies.go
+++ b/pkg/workflow/apm_dependencies.go
@@ -195,6 +195,10 @@ func GenerateAPMPackStep(apmDeps *APMDependenciesInfo, target string, data *Work
// GenerateAPMRestoreStep generates the GitHub Actions step that restores APM packages
// from a pre-packed bundle in the agent job.
//
+// The restore step uses the JavaScript implementation in apm_unpack.cjs (actions/setup/js)
+// via actions/github-script, removing the dependency on microsoft/apm-action for
+// the unpack phase. Packing still uses microsoft/apm-action in the dedicated APM job.
+//
// Parameters:
// - apmDeps: APM dependency configuration extracted from frontmatter
// - data: WorkflowData used for action pin resolution
@@ -206,20 +210,19 @@ func GenerateAPMRestoreStep(apmDeps *APMDependenciesInfo, data *WorkflowData) Gi
return GitHubActionStep{}
}
- apmDepsLog.Printf("Generating APM restore step (isolated=%v)", apmDeps.Isolated)
-
- actionRef := GetActionPin("microsoft/apm-action")
+ apmDepsLog.Printf("Generating APM restore step using JS unpacker (isolated=%v)", apmDeps.Isolated)
lines := []string{
" - name: Restore APM dependencies",
- " uses: " + actionRef,
+ " uses: " + GetActionPin("actions/github-script"),
+ " env:",
+ " APM_BUNDLE_DIR: /tmp/gh-aw/apm-bundle",
" with:",
- " bundle: /tmp/gh-aw/apm-bundle/*.tar.gz",
- " apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
- }
-
- if apmDeps.Isolated {
- lines = append(lines, " isolated: 'true'")
+ " script: |",
+ " const { setupGlobals } = require('" + SetupActionDestination + "/setup_globals.cjs');",
+ " setupGlobals(core, github, context, exec, io);",
+ " const { main } = require('" + SetupActionDestination + "/apm_unpack.cjs');",
+ " await main();",
}
return GitHubActionStep(lines)
diff --git a/pkg/workflow/apm_dependencies_compilation_test.go b/pkg/workflow/apm_dependencies_compilation_test.go
index 9c84839e305..cfe8dfb4fba 100644
--- a/pkg/workflow/apm_dependencies_compilation_test.go
+++ b/pkg/workflow/apm_dependencies_compilation_test.go
@@ -74,8 +74,10 @@ Test with a single APM dependency
"Lock file should download APM bundle in agent job")
assert.Contains(t, lockContent, "Restore APM dependencies",
"Lock file should contain APM restore step in agent job")
- assert.Contains(t, lockContent, "bundle: /tmp/gh-aw/apm-bundle/*.tar.gz",
- "Lock file should restore from bundle path")
+ assert.Contains(t, lockContent, "APM_BUNDLE_DIR: /tmp/gh-aw/apm-bundle",
+ "Lock file should configure bundle directory for JS unpacker")
+ assert.Contains(t, lockContent, "apm_unpack.cjs",
+ "Lock file should use JS unpacker script")
// Old install step should NOT appear
assert.NotContains(t, lockContent, "Install APM dependencies",
@@ -200,9 +202,9 @@ Test with isolated APM dependencies
"Lock file should contain APM pack step")
assert.Contains(t, lockContent, "Restore APM dependencies",
"Lock file should contain APM restore step")
- // Restore step should include isolated: true because frontmatter says so
- assert.Contains(t, lockContent, "isolated: 'true'",
- "Lock file restore step should include isolated flag")
+ // Restore step uses the JS unpacker (isolated flag not required for JS implementation)
+ assert.Contains(t, lockContent, "apm_unpack.cjs",
+ "Lock file restore step should use the JS unpacker")
}
func TestAPMDependenciesCompilationClaudeEngineTarget(t *testing.T) {
diff --git a/pkg/workflow/apm_dependencies_test.go b/pkg/workflow/apm_dependencies_test.go
index f556897ffac..8bc7081a22e 100644
--- a/pkg/workflow/apm_dependencies_test.go
+++ b/pkg/workflow/apm_dependencies_test.go
@@ -439,28 +439,31 @@ func TestGenerateAPMRestoreStep(t *testing.T) {
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package"}, Isolated: false},
expectedContains: []string{
"Restore APM dependencies",
- "microsoft/apm-action",
- "bundle: /tmp/gh-aw/apm-bundle/*.tar.gz",
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "actions/github-script",
+ "APM_BUNDLE_DIR: /tmp/gh-aw/apm-bundle",
+ "apm_unpack.cjs",
+ "await main();",
},
- expectedNotContains: []string{"isolated"},
+ expectedNotContains: []string{"microsoft/apm-action"},
},
{
name: "Isolated restore step",
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package"}, Isolated: true},
expectedContains: []string{
"Restore APM dependencies",
- "microsoft/apm-action",
- "bundle: /tmp/gh-aw/apm-bundle/*.tar.gz",
- "isolated: 'true'",
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "actions/github-script",
+ "APM_BUNDLE_DIR: /tmp/gh-aw/apm-bundle",
+ "apm_unpack.cjs",
+ "await main();",
},
+ expectedNotContains: []string{"microsoft/apm-action"},
},
{
- name: "Custom APM version still uses env var reference in step",
+ name: "Custom APM version still uses JS unpacker (version not needed for unpack)",
apmDeps: &APMDependenciesInfo{Packages: []string{"microsoft/apm-sample-package"}, Version: "v1.0.0"},
expectedContains: []string{
- "apm-version: ${{ env.GH_AW_INFO_APM_VERSION }}",
+ "apm_unpack.cjs",
+ "actions/github-script",
},
},
}