Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/dependencies.md
Original file line number Diff line number Diff line change
Expand Up @@ -528,10 +528,15 @@ dependencies:
version: "1.2.0"
depth: 2
resolved_by: "microsoft/apm-sample-package"
mcp_servers:
- acme-kb
- github
```

The `deployed_files` field tracks exactly which files APM placed in your project. This enables safe cleanup on `apm uninstall` and `apm prune` — only tracked files are removed.

The `mcp_servers` field records the MCP dependency references (e.g. `io.github.github/github-mcp-server`) for servers currently managed by APM. It is used to detect and clean up stale servers when dependencies change.

### How It Works

1. **First install**: APM resolves dependencies, downloads packages, and writes `apm.lock`
Expand Down
1 change: 1 addition & 0 deletions docs/manifest-schema.md
Original file line number Diff line number Diff line change
Expand Up @@ -360,6 +360,7 @@ dependencies: # YAML list (not a map)
depth: <int> # 1 = direct, 2+ = transitive
resolved_by: <string> # Parent dependency (transitive only)
deployed_files: <list<string>> # Workspace-relative paths of installed files
mcp_servers: <list<string>> # MCP dependency references managed by APM (OPTIONAL, e.g. "io.github.github/github-mcp-server")
```

### 6.2. Resolver Behaviour
Expand Down
249 changes: 246 additions & 3 deletions src/apm_cli/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -754,6 +754,17 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, force, verbo
apm_count = 0
prompt_count = 0
agent_count = 0

# Capture old MCP servers from lockfile BEFORE _install_apm_dependencies
# regenerates it (which drops the mcp_servers field).
# We always read this — even when --only=apm — so we can restore the
# field after the lockfile is regenerated by the APM install step.
old_mcp_servers: builtins.set = builtins.set()
_lock_path = Path.cwd() / "apm.lock"
_existing_lock = LockFile.read(_lock_path)
if _existing_lock:
old_mcp_servers = builtins.set(_existing_lock.mcp_servers)

if should_install_apm and apm_deps:
if not APM_DEPS_AVAILABLE:
_rich_error("APM dependency system not available")
Expand All @@ -774,6 +785,12 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, force, verbo
elif should_install_apm and not apm_deps:
_rich_info("No APM dependencies found in apm.yml")

# When --update is used, package files on disk may have changed.
# Clear the parse cache so transitive MCP collection reads fresh data.
if update:
from apm_cli.models.apm_package import clear_apm_yml_cache
clear_apm_yml_cache()

# Collect transitive MCP dependencies from resolved APM packages
apm_modules_path = Path.cwd() / "apm_modules"
if should_install_mcp and apm_modules_path.exists():
Expand All @@ -785,10 +802,28 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, force, verbo

# Continue with MCP installation (existing logic)
mcp_count = 0
new_mcp_servers: builtins.set = builtins.set()
if should_install_mcp and mcp_deps:
mcp_count = _install_mcp_dependencies(mcp_deps, runtime, exclude, verbose)
new_mcp_servers = _get_mcp_dep_names(mcp_deps)

# Remove stale MCP servers that are no longer needed
stale_servers = old_mcp_servers - new_mcp_servers
if stale_servers:
_remove_stale_mcp_servers(stale_servers, runtime, exclude)

# Persist the new MCP server set in the lockfile
_update_lockfile_mcp_servers(new_mcp_servers)
elif should_install_mcp and not mcp_deps:
# No MCP deps at all — remove any old APM-managed servers
if old_mcp_servers:
_remove_stale_mcp_servers(old_mcp_servers, runtime, exclude)
_update_lockfile_mcp_servers(builtins.set())
_rich_warning("No MCP dependencies found in apm.yml")
elif not should_install_mcp and old_mcp_servers:
# --only=apm: APM install regenerated the lockfile and dropped
# mcp_servers. Restore the previous set so it is not lost.
_update_lockfile_mcp_servers(old_mcp_servers)

# Show beautiful post-install summary
_rich_blank_line()
Expand Down Expand Up @@ -1236,6 +1271,10 @@ def uninstall(ctx, packages, dry_run):
lockfile_path = get_lockfile_path(Path("."))
lockfile = LockFile.read(lockfile_path)

# Capture MCP servers from lockfile *before* it is mutated/deleted so
# that stale-MCP cleanup can compute the diff even when all deps are removed.
_pre_uninstall_mcp_servers = builtins.set(lockfile.mcp_servers) if lockfile else builtins.set()

if apm_modules_dir.exists():
deleted_pkg_paths: list = []
for package in packages_to_remove:
Expand Down Expand Up @@ -1476,6 +1515,19 @@ def _find_transitive_orphans(lockfile, removed_urls):
instructions_cleaned = result.get("files_removed", 0)

# Phase 2: Re-integrate from remaining installed packages in apm_modules/
# Detect target so we only re-create Claude artefacts when appropriate
from apm_cli.core.target_detection import (
detect_target,
should_integrate_claude,
)
config_target = apm_package.target
detected_target, _ = detect_target(
project_root=project_root,
explicit_target=None,
config_target=config_target,
)
integrate_claude = should_integrate_claude(detected_target)

prompt_integrator = PromptIntegrator()
agent_integrator = AgentIntegrator()
skill_integrator = SkillIntegrator()
Expand Down Expand Up @@ -1511,13 +1563,14 @@ def _find_transitive_orphans(lockfile, removed_urls):
prompt_integrator.integrate_package_prompts(pkg_info, project_root)
if agent_integrator.should_integrate(project_root):
agent_integrator.integrate_package_agents(pkg_info, project_root)
if Path(".claude").exists():
if integrate_claude:
agent_integrator.integrate_package_agents_claude(pkg_info, project_root)
skill_integrator.integrate_package_skill(pkg_info, project_root)
if command_integrator.should_integrate(project_root):
if integrate_claude:
command_integrator.integrate_package_commands(pkg_info, project_root)
hook_integrator_reint.integrate_package_hooks(pkg_info, project_root)
hook_integrator_reint.integrate_package_hooks_claude(pkg_info, project_root)
if integrate_claude:
hook_integrator_reint.integrate_package_hooks_claude(pkg_info, project_root)
instruction_integrator.integrate_package_instructions(pkg_info, project_root)
except Exception:
pass # Best effort re-integration
Expand All @@ -1539,6 +1592,27 @@ def _find_transitive_orphans(lockfile, removed_urls):
if instructions_cleaned > 0:
_rich_info(f"✓ Cleaned up {instructions_cleaned} instruction(s)")

# Clean up stale MCP servers after uninstall
try:
old_mcp_servers = _pre_uninstall_mcp_servers
if old_mcp_servers:
# Recompute MCP deps from remaining packages
apm_modules_path = Path.cwd() / "apm_modules"
remaining_mcp = _collect_transitive_mcp_deps(apm_modules_path, lockfile_path, trust_private=True)
# Also include root-level MCP deps from apm.yml
try:
remaining_root_mcp = apm_package.get_mcp_dependencies()
except Exception:
remaining_root_mcp = []
all_remaining_mcp = _deduplicate_mcp_deps(remaining_root_mcp + remaining_mcp)
new_mcp_servers = _get_mcp_dep_names(all_remaining_mcp)
stale_servers = old_mcp_servers - new_mcp_servers
if stale_servers:
_remove_stale_mcp_servers(stale_servers)
_update_lockfile_mcp_servers(new_mcp_servers)
except Exception:
pass # best-effort MCP cleanup

# Final summary
summary_lines = []
summary_lines.append(
Expand Down Expand Up @@ -1676,6 +1750,9 @@ def download_callback(dep_ref, modules_dir):
deps_to_install = flat_deps.get_installation_list()

# If specific packages were requested, filter to only those
# **and their full transitive dependency subtrees** so that
# sub-deps (and their MCP servers) are installed and recorded
# in the lockfile.
if only_packages:
# Build identity set from user-supplied package specs.
# Accepts any input form: git URLs, FQDN, shorthand.
Expand All @@ -1687,6 +1764,21 @@ def download_callback(dep_ref, modules_dir):
except Exception:
only_identities.add(p)

# Expand the set to include transitive descendants of the
# requested packages so their MCP servers, primitives, etc.
# are correctly installed and written to the lockfile.
tree = dependency_graph.dependency_tree

def _collect_descendants(node):
"""Walk the tree and add every child identity."""
for child in node.children:
only_identities.add(child.dependency_ref.get_identity())
_collect_descendants(child)

for node in tree.nodes.values():
if node.dependency_ref.get_identity() in only_identities:
_collect_descendants(node)

deps_to_install = [
dep for dep in deps_to_install
if dep.get_identity() in only_identities
Expand Down Expand Up @@ -2411,7 +2503,19 @@ def download_callback(dep_ref, modules_dir):
for dep_key, dep_files in package_deployed_files.items():
if dep_key in lockfile.dependencies:
lockfile.dependencies[dep_key].deployed_files = dep_files

lockfile_path = get_lockfile_path(project_root)

# When installing a subset of packages (apm install <pkg>),
# merge new entries into the existing lockfile instead of
# overwriting it — otherwise the uninstalled packages disappear.
if only_packages:
existing = LockFile.read(lockfile_path)
if existing:
for key, dep in lockfile.dependencies.items():
existing.add_dependency(dep)
lockfile = existing

lockfile.save(lockfile_path)
_rich_info(f"Generated apm.lock with {len(lockfile.dependencies)} dependencies")
except Exception as e:
Expand Down Expand Up @@ -2666,6 +2770,145 @@ def _apply_mcp_overlay(server_info_cache: dict, dep) -> None:
)


def _get_mcp_dep_names(mcp_deps: list) -> builtins.set:
"""Extract unique server names from a list of MCP dependencies.

Args:
mcp_deps: List of MCP dependency entries (MCPDependency objects or strings).

Returns:
Set of MCP server names.
"""
names: builtins.set = builtins.set()
for dep in mcp_deps:
if hasattr(dep, "name"):
names.add(dep.name)
elif isinstance(dep, str):
names.add(dep)
return names


def _remove_stale_mcp_servers(
    stale_names: builtins.set,
    runtime: str = None,
    exclude: str = None,
) -> None:
    """Remove MCP server entries that are no longer required by any dependency.

    Cleans up runtime configuration files only for the runtimes that were
    actually targeted during installation. ``stale_names`` contains MCP
    dependency references (e.g. ``"io.github.github/github-mcp-server"``).
    For Copilot CLI and Codex, config keys are derived from the last path
    segment, so we match against both the full reference and the short name.

    All file handling is best-effort: any I/O or parse error is swallowed so
    that cleanup never aborts an install/uninstall run.

    Args:
        stale_names: Set of MCP dependency references to remove.
        runtime: If set, only clean this specific runtime.
        exclude: If set (and ``runtime`` is not), skip this runtime.
    """
    if not stale_names:
        return

    # Determine which runtimes to clean, mirroring install-time logic:
    # an explicit runtime wins outright; exclude only narrows the default
    # "all runtimes" set.
    all_runtimes = {"vscode", "copilot", "codex"}
    if runtime:
        target_runtimes = {runtime}
    else:
        target_runtimes = builtins.set(all_runtimes)
        if exclude:
            target_runtimes.discard(exclude)

    # Build an expanded set that includes both the full reference and the
    # last-segment short name so we match config keys in every runtime.
    expanded_stale: builtins.set = builtins.set()
    for ref in stale_names:
        expanded_stale.add(ref)
        if "/" in ref:
            expanded_stale.add(ref.rsplit("/", 1)[-1])

    def _prune_json_config(path: Path, servers_key: str, label: str) -> None:
        """Drop stale entries from one JSON MCP config file (best-effort).

        ``servers_key`` is the top-level key holding the server map
        ("servers" for VS Code, "mcpServers" for Copilot CLI); ``label``
        is the human-readable config name used in the success message.
        """
        if not path.exists():
            return
        try:
            import json as _json

            config = _json.loads(path.read_text(encoding="utf-8"))
            servers = config.get(servers_key, {})
            removed = [n for n in expanded_stale if n in servers]
            for name in removed:
                del servers[name]
            if removed:
                path.write_text(
                    _json.dumps(config, indent=2), encoding="utf-8"
                )
            for name in removed:
                _rich_info(f"✓ Removed stale MCP server '{name}' from {label}")
        except Exception:
            pass  # best-effort cleanup

    # Clean .vscode/mcp.json
    if "vscode" in target_runtimes:
        _prune_json_config(
            Path.cwd() / ".vscode" / "mcp.json", "servers", ".vscode/mcp.json"
        )

    # Clean ~/.copilot/mcp-config.json
    if "copilot" in target_runtimes:
        _prune_json_config(
            Path.home() / ".copilot" / "mcp-config.json",
            "mcpServers",
            "Copilot CLI config",
        )

    # Clean ~/.codex/config.toml (mcp_servers section). Codex stores TOML,
    # not JSON, so it cannot share the JSON helper above.
    if "codex" in target_runtimes:
        codex_cfg = Path.home() / ".codex" / "config.toml"
        if codex_cfg.exists():
            try:
                import toml as _toml  # third-party; already used at install time

                config = _toml.loads(codex_cfg.read_text(encoding="utf-8"))
                servers = config.get("mcp_servers", {})
                removed = [n for n in expanded_stale if n in servers]
                for name in removed:
                    del servers[name]
                if removed:
                    codex_cfg.write_text(
                        _toml.dumps(config), encoding="utf-8"
                    )
                for name in removed:
                    _rich_info(f"✓ Removed stale MCP server '{name}' from Codex CLI config")
            except Exception:
                pass  # best-effort cleanup


def _update_lockfile_mcp_servers(mcp_server_names: builtins.set) -> None:
    """Persist the current set of APM-managed MCP server names to apm.lock.

    Silently does nothing when the lockfile is absent, unreadable, or
    unsavable — the lockfile update is best-effort and must never abort
    the calling command.

    Args:
        mcp_server_names: Set of MCP server names currently managed by APM.
    """
    lock_path = Path.cwd() / "apm.lock"
    if not lock_path.exists():
        return
    try:
        lockfile = LockFile.read(lock_path)
    except Exception:
        return  # best-effort: an unreadable lockfile is ignored
    if lockfile is None:
        return
    try:
        # Store deterministically sorted so lockfile diffs stay stable.
        lockfile.mcp_servers = sorted(mcp_server_names)
        lockfile.save(lock_path)
    except Exception:
        pass  # best-effort: a failed save must not break the command


def _install_mcp_dependencies(
mcp_deps: list, runtime: str = None, exclude: str = None, verbose: bool = False
):
Expand Down
4 changes: 4 additions & 0 deletions src/apm_cli/deps/lockfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ class LockFile:
)
apm_version: Optional[str] = None
dependencies: Dict[str, LockedDependency] = field(default_factory=dict)
mcp_servers: List[str] = field(default_factory=list)

def add_dependency(self, dep: LockedDependency) -> None:
"""Add a dependency to the lock file."""
Expand Down Expand Up @@ -146,6 +147,8 @@ def to_yaml(self) -> str:
if self.apm_version:
data["apm_version"] = self.apm_version
data["dependencies"] = [dep.to_dict() for dep in self.get_all_dependencies()]
if self.mcp_servers:
data["mcp_servers"] = sorted(self.mcp_servers)
return yaml.dump(
data, default_flow_style=False, sort_keys=False, allow_unicode=True
)
Expand All @@ -165,6 +168,7 @@ def from_yaml(cls, yaml_str: str) -> "LockFile":
)
for dep_data in data.get("dependencies", []):
lock.add_dependency(LockedDependency.from_dict(dep_data))
lock.mcp_servers = list(data.get("mcp_servers", []))
return lock

def write(self, path: Path) -> None:
Expand Down
Loading