From 96d2a83fc3be6b78fced4f7e8d3eba4df7c69b42 Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Fri, 27 Feb 2026 18:25:33 +0000 Subject: [PATCH 1/9] feat: support transitive MCP dependency propagation (#121) - Fix MCP dependency parsing to preserve both string and dict entries - Add transitive MCP collection from apm_modules/ dependency tree - Add deduplication, inline writing to .vscode/mcp.json and ~/.copilot/mcp-config.json - Add 25 unit tests covering parsing, collection, dedup, and file writing - Update docs/dependencies.md and docs/integrations.md --- docs/dependencies.md | 14 +- docs/integrations.md | 7 + src/apm_cli/cli.py | 360 +++++++++++++++++++++++------- src/apm_cli/models/apm_package.py | 15 +- tests/unit/test_transitive_mcp.py | 328 +++++++++++++++++++++++++++ 5 files changed, 636 insertions(+), 88 deletions(-) create mode 100644 tests/unit/test_transitive_mcp.py diff --git a/docs/dependencies.md b/docs/dependencies.md index 9f172666d..816a735bc 100644 --- a/docs/dependencies.md +++ b/docs/dependencies.md @@ -75,9 +75,21 @@ dependencies: - microsoft/apm-sample-package # Design standards, prompts - github/awesome-copilot/skills/review-and-refactor # Code review skill mcp: - - io.github.github/github-mcp-server + - io.github.github/github-mcp-server # Registry reference + - name: my-knowledge-base # Inline config + type: streamable-http + url: https://my-kb.example.com/ ``` +MCP dependencies support two formats: + +- **Registry strings** — resolve via the MCP server registry (e.g. `io.github.github/github-mcp-server`). +- **Inline dicts** — written directly to runtime configs, bypassing the registry. Useful for private or self-hosted servers. + +Inline entries require `name`, `type` (`sse` or `streamable-http`), and `url`. An optional `headers` map is also supported. + +MCP dependencies declared by transitive APM packages are collected automatically during `apm install`. + ### 2. 
Install Dependencies ```bash diff --git a/docs/integrations.md b/docs/integrations.md index a152fd9fc..d243195c7 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -483,11 +483,18 @@ APM provides first-class support for MCP servers: # apm.yml - MCP dependencies dependencies: mcp: + # Registry references - ghcr.io/github/github-mcp-server - ghcr.io/modelcontextprotocol/filesystem-server - ghcr.io/modelcontextprotocol/postgres-server + # Inline config (private / self-hosted servers) + - name: my-knowledge-base + type: streamable-http + url: https://my-kb.example.com/ ``` +Inline MCP entries (`name`/`type`/`url` dicts) are written directly into runtime configs and bypass the registry. They are also collected transitively from APM dependencies. + ```bash # Install MCP dependencies apm install diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 6c2827f46..5260d8a85 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -701,6 +701,13 @@ def install(ctx, packages, runtime, exclude, only, update, dry_run, verbose): elif should_install_apm and not apm_deps: _rich_info("No APM dependencies found in apm.yml") + # Collect transitive MCP dependencies from resolved APM packages + if should_install_mcp and should_install_apm: + transitive_mcp = _collect_transitive_mcp_deps(Path.cwd() / "apm_modules") + if transitive_mcp: + _rich_info(f"Collected {len(transitive_mcp)} transitive MCP dependency(ies)") + mcp_deps = _deduplicate_mcp_deps(mcp_deps + transitive_mcp) + # Continue with MCP installation (existing logic) mcp_count = 0 if should_install_mcp and mcp_deps: @@ -2140,13 +2147,78 @@ def matches_filter(dep): raise RuntimeError(f"Failed to resolve APM dependencies: {e}") +def _collect_transitive_mcp_deps(apm_modules_dir: Path) -> list: + """Collect MCP dependencies from all resolved APM packages in apm_modules/. + + Walks every apm.yml inside apm_modules/ and returns a flat list of their + MCP entries (strings or inline dicts). 
+ + Args: + apm_modules_dir: Path to the apm_modules directory. + + Returns: + List of MCP dependency entries (str or dict). + """ + if not apm_modules_dir.exists(): + return [] + + from apm_cli.models.apm_package import APMPackage + + collected = [] + for apm_yml_path in apm_modules_dir.rglob("apm.yml"): + try: + pkg = APMPackage.from_apm_yml(apm_yml_path) + mcp = pkg.get_mcp_dependencies() + if mcp: + collected.extend(mcp) + except Exception: + # Skip packages that fail to parse + continue + return collected + + +def _deduplicate_mcp_deps(deps: list) -> list: + """Deduplicate a mixed list of MCP deps (strings and dicts). + + Strings are deduped by value; dicts are deduped by their 'name' key. + + Args: + deps: Mixed list of str and dict MCP entries. + + Returns: + Deduplicated list preserving insertion order. + """ + import builtins + + seen_strings: builtins.set = builtins.set() + seen_names: builtins.set = builtins.set() + result = [] + for dep in deps: + if isinstance(dep, str): + if dep not in seen_strings: + seen_strings.add(dep) + result.append(dep) + elif isinstance(dep, dict): + name = dep.get("name", "") + if name and name not in seen_names: + seen_names.add(name) + result.append(dep) + elif not name and dep not in result: + result.append(dep) + return result + + def _install_mcp_dependencies( - mcp_deps: List[str], runtime: str = None, exclude: str = None, verbose: bool = False + mcp_deps: list, runtime: str = None, exclude: str = None, verbose: bool = False ): """Install MCP dependencies using existing logic. + Supports two formats: + - Registry strings (e.g. "ghcr.io/github/github-mcp-server") + - Inline dicts (e.g. 
{"name": "my-server", "type": "sse", "url": "…"}) + Args: - mcp_deps: List of MCP dependency names + mcp_deps: List of MCP dependency entries (str or dict) runtime: Target specific runtime only exclude: Exclude specific runtime from installation verbose: Show detailed installation information @@ -2158,6 +2230,10 @@ def _install_mcp_dependencies( _rich_warning("No MCP dependencies found in apm.yml") return 0 + # Separate registry strings from inline dict configs + registry_deps = [d for d in mcp_deps if isinstance(d, str)] + inline_deps = [d for d in mcp_deps if isinstance(d, dict)] + console = _get_console() # Start MCP section with clean header @@ -2284,99 +2360,225 @@ def _install_mcp_dependencies( # Use the new registry operations module for better server detection configured_count = 0 - try: - from apm_cli.registry.operations import MCPServerOperations - operations = MCPServerOperations() + # --- Registry-based deps (strings) --- + if registry_deps: + try: + from apm_cli.registry.operations import MCPServerOperations - # Early validation: check if all servers exist in registry (fail-fast like npm) - if verbose: - _rich_info(f"Validating {len(mcp_deps)} servers...") - valid_servers, invalid_servers = operations.validate_servers_exist(mcp_deps) + operations = MCPServerOperations() - if invalid_servers: - _rich_error( - f"Server(s) not found in registry: {', '.join(invalid_servers)}" - ) - _rich_info("Run 'apm mcp search ' to find available servers") - raise RuntimeError( - f"Cannot install {len(invalid_servers)} missing server(s)" - ) + # Early validation: check if all servers exist in registry (fail-fast like npm) + if verbose: + _rich_info(f"Validating {len(registry_deps)} registry servers...") + valid_servers, invalid_servers = operations.validate_servers_exist(registry_deps) - if not valid_servers: - if console: - console.print("└─ [green]No servers to install[/green]") - else: - _rich_success("No servers to install") - return 0 + if invalid_servers: + 
_rich_error( + f"Server(s) not found in registry: {', '.join(invalid_servers)}" + ) + _rich_info("Run 'apm mcp search ' to find available servers") + raise RuntimeError( + f"Cannot install {len(invalid_servers)} missing server(s)" + ) - # Check which valid servers actually need installation - servers_to_install = operations.check_servers_needing_installation( - target_runtimes, valid_servers - ) + if valid_servers: + # Check which valid servers actually need installation + servers_to_install = operations.check_servers_needing_installation( + target_runtimes, valid_servers + ) - if not servers_to_install: - # All already configured - if console: - for dep in mcp_deps: - console.print( - f"│ [green]✓[/green] {dep} [dim](already configured)[/dim]" + if not servers_to_install: + # All already configured + if console: + for dep in registry_deps: + console.print( + f"│ [green]✓[/green] {dep} [dim](already configured)[/dim]" + ) + else: + _rich_success("All registry MCP servers already configured") + configured_count += len(registry_deps) + else: + # Batch fetch server info once to avoid duplicate registry calls + if verbose: + _rich_info(f"Installing {len(servers_to_install)} servers...") + server_info_cache = operations.batch_fetch_server_info(servers_to_install) + + # Collect both environment and runtime variables using cached server info + shared_env_vars = operations.collect_environment_variables( + servers_to_install, server_info_cache + ) + shared_runtime_vars = operations.collect_runtime_variables( + servers_to_install, server_info_cache ) - console.print("└─ [green]All servers up to date[/green]") - else: - _rich_success("All MCP servers already configured") - return len(mcp_deps) - else: - # Batch fetch server info once to avoid duplicate registry calls - if verbose: - _rich_info(f"Installing {len(servers_to_install)} servers...") - server_info_cache = operations.batch_fetch_server_info(servers_to_install) - # Collect both environment and runtime variables using 
cached server info - shared_env_vars = operations.collect_environment_variables( - servers_to_install, server_info_cache - ) - shared_runtime_vars = operations.collect_runtime_variables( - servers_to_install, server_info_cache + # Install for each target runtime using cached server info and shared variables + for dep in servers_to_install: + if console: + console.print(f"│ [cyan]⬇️[/cyan] {dep}") + console.print( + f"│ └─ Configuring for {', '.join([rt.title() for rt in target_runtimes])}..." + ) + + for rt in target_runtimes: + if verbose: + _rich_info(f"Configuring {rt}...") + _install_for_runtime( + rt, + [dep], # Install one at a time for better output + shared_env_vars, + server_info_cache, + shared_runtime_vars, + ) + + if console: + console.print( + f"│ [green]✓[/green] {dep} → {', '.join([rt.title() for rt in target_runtimes])}" + ) + configured_count += 1 + + except ImportError: + _rich_warning("Registry operations not available") + _rich_error("Cannot validate MCP servers without registry operations") + raise RuntimeError("Registry operations module required for MCP installation") + + # --- Inline dict deps (name/type/url) --- + if inline_deps: + inline_count = _install_inline_mcp_deps(inline_deps, target_runtimes, verbose) + configured_count += inline_count + + # Close the panel + if console: + if configured_count > 0: + console.print( + f"└─ [green]Configured {configured_count} server{'s' if configured_count != 1 else ''}[/green]" ) + else: + console.print("└─ [green]All servers up to date[/green]") - # Install for each target runtime using cached server info and shared variables - for dep in servers_to_install: - if console: - console.print(f"│ [cyan]⬇️[/cyan] {dep}") - console.print( - f"│ └─ Configuring for {', '.join([rt.title() for rt in target_runtimes])}..." 
- ) + return configured_count + + +def _install_inline_mcp_deps( + inline_deps: list, target_runtimes: list, verbose: bool = False +) -> int: + """Install inline MCP dependencies (dict configs) directly into runtime configs. + + Inline deps look like: {"name": "my-server", "type": "sse", "url": "https://..."} + They bypass the MCP registry and are written directly. + + Args: + inline_deps: List of dict MCP entries. + target_runtimes: List of target runtime names. + verbose: Show detailed output. - for rt in target_runtimes: + Returns: + int: Number of servers configured. + """ + import json + from pathlib import Path + + console = _get_console() + configured = 0 + + for dep in inline_deps: + name = dep.get("name", "") + server_type = dep.get("type", "sse") + url = dep.get("url", "") + headers = dep.get("headers", {}) + + if not name or not url: + _rich_warning(f"Skipping inline MCP dep with missing name or url: {dep}") + continue + + # Build the server config entry + server_config = {"type": server_type, "url": url} + if headers: + server_config["headers"] = headers + + for rt in target_runtimes: + try: + if rt == "vscode": + _write_inline_mcp_vscode(name, server_config, verbose) + elif rt == "copilot": + _write_inline_mcp_copilot(name, server_config, verbose) + elif rt == "codex": + _write_inline_mcp_copilot(name, server_config, verbose) + else: if verbose: - _rich_info(f"Configuring {rt}...") - _install_for_runtime( - rt, - [dep], # Install one at a time for better output - shared_env_vars, - server_info_cache, - shared_runtime_vars, - ) + _rich_warning(f"Unsupported runtime '{rt}' for inline MCP config") + continue + except Exception as e: + _rich_error(f"Failed to configure inline MCP '{name}' for {rt}: {e}") + continue - if console: - console.print( - f"│ [green]✓[/green] {dep} → {', '.join([rt.title() for rt in target_runtimes])}" - ) - configured_count += 1 + if console: + console.print( + f"│ [green]✓[/green] {name} (inline) → {', '.join([rt.title() for rt 
in target_runtimes])}" + ) + configured += 1 - # Close the panel - if console: - console.print( - f"└─ [green]Configured {configured_count} server{'s' if configured_count != 1 else ''}[/green]" - ) + return configured - return configured_count - except ImportError: - _rich_warning("Registry operations not available") - _rich_error("Cannot validate MCP servers without registry operations") - raise RuntimeError("Registry operations module required for MCP installation") +def _write_inline_mcp_vscode(name: str, server_config: dict, verbose: bool = False): + """Write an inline MCP server config to .vscode/mcp.json.""" + import json + from pathlib import Path + + vscode_dir = Path.cwd() / ".vscode" + vscode_dir.mkdir(parents=True, exist_ok=True) + mcp_path = vscode_dir / "mcp.json" + + config = {} + if mcp_path.exists(): + try: + with open(mcp_path, "r", encoding="utf-8") as f: + config = json.load(f) + except (json.JSONDecodeError, IOError): + config = {} + + if "servers" not in config: + config["servers"] = {} + + if name in config["servers"]: + if verbose: + _rich_info(f" {name} already in .vscode/mcp.json, updating") + + config["servers"][name] = server_config + + with open(mcp_path, "w", encoding="utf-8") as f: + json.dump(config, f, indent=2) + + +def _write_inline_mcp_copilot(name: str, server_config: dict, verbose: bool = False): + """Write an inline MCP server config to ~/.copilot/mcp-config.json.""" + import json + from pathlib import Path + + copilot_dir = Path.home() / ".copilot" + copilot_dir.mkdir(parents=True, exist_ok=True) + config_path = copilot_dir / "mcp-config.json" + + config = {} + if config_path.exists(): + try: + with open(config_path, "r", encoding="utf-8") as f: + config = json.load(f) + except (json.JSONDecodeError, IOError): + config = {} + + if "mcpServers" not in config: + config["mcpServers"] = {} + + if name in config["mcpServers"]: + if verbose: + _rich_info(f" {name} already in copilot config, updating") + + config["mcpServers"][name] 
= server_config + + with open(config_path, "w", encoding="utf-8") as f: + json.dump(config, f, indent=2) def _show_install_summary( diff --git a/src/apm_cli/models/apm_package.py b/src/apm_cli/models/apm_package.py index 003f72e58..86c3b7945 100644 --- a/src/apm_cli/models/apm_package.py +++ b/src/apm_cli/models/apm_package.py @@ -708,7 +708,7 @@ class APMPackage: license: Optional[str] = None source: Optional[str] = None # Source location (for dependencies) resolved_commit: Optional[str] = None # Resolved commit SHA (for dependencies) - dependencies: Optional[Dict[str, List[Union[DependencyReference, str]]]] = None # Mixed types for APM/MCP + dependencies: Optional[Dict[str, List[Union[DependencyReference, str, dict]]]] = None # Mixed types for APM/MCP/inline scripts: Optional[Dict[str, str]] = None package_path: Optional[Path] = None # Local path to package target: Optional[str] = None # Target agent: vscode, claude, or all (applies to compile and install) @@ -763,8 +763,8 @@ def from_apm_yml(cls, apm_yml_path: Path) -> "APMPackage": raise ValueError(f"Invalid APM dependency '{dep_str}': {e}") dependencies[dep_type] = parsed_deps else: - # Other dependencies (like MCP) remain as strings - dependencies[dep_type] = [str(dep) for dep in dep_list if isinstance(dep, str)] + # Other dependencies (like MCP): keep strings and dicts + dependencies[dep_type] = [dep for dep in dep_list if isinstance(dep, (str, dict))] # Parse package content type pkg_type = None @@ -797,13 +797,12 @@ def get_apm_dependencies(self) -> List[DependencyReference]: # Filter to only return DependencyReference objects return [dep for dep in self.dependencies['apm'] if isinstance(dep, DependencyReference)] - def get_mcp_dependencies(self) -> List[str]: - """Get list of MCP dependencies (as strings for compatibility).""" + def get_mcp_dependencies(self) -> List[Union[str, dict]]: + """Get list of MCP dependencies (strings for registry, dicts for inline configs).""" if not self.dependencies or 'mcp' 
not in self.dependencies: return [] - # MCP deps are stored as strings, not DependencyReference objects - return [str(dep) if isinstance(dep, DependencyReference) else dep - for dep in self.dependencies.get('mcp', [])] + return [dep for dep in self.dependencies.get('mcp', []) + if isinstance(dep, (str, dict))] def has_apm_dependencies(self) -> bool: """Check if this package has APM dependencies.""" diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py new file mode 100644 index 000000000..154be3049 --- /dev/null +++ b/tests/unit/test_transitive_mcp.py @@ -0,0 +1,328 @@ +"""Tests for transitive MCP dependency collection, deduplication, and inline installation.""" + +import json +import os +import tempfile +import unittest +from pathlib import Path +from unittest.mock import patch, MagicMock + +import yaml + +from apm_cli.models.apm_package import APMPackage +from apm_cli.cli import ( + _collect_transitive_mcp_deps, + _deduplicate_mcp_deps, + _install_inline_mcp_deps, + _write_inline_mcp_vscode, + _write_inline_mcp_copilot, +) + + +# --------------------------------------------------------------------------- +# APMPackage – MCP dict parsing +# --------------------------------------------------------------------------- +class TestAPMPackageMCPParsing(unittest.TestCase): + """Ensure apm_package preserves both string and dict MCP entries.""" + + def test_parse_string_mcp_deps(self): + """String-only MCP deps parse correctly.""" + with tempfile.TemporaryDirectory() as tmp: + yml = Path(tmp) / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/some/server"]}, + })) + pkg = APMPackage.from_apm_yml(yml) + deps = pkg.get_mcp_dependencies() + + self.assertEqual(deps, ["ghcr.io/some/server"]) + + def test_parse_dict_mcp_deps(self): + """Inline dict MCP deps are preserved.""" + inline = {"name": "my-srv", "type": "sse", "url": "https://example.com"} + with tempfile.TemporaryDirectory() 
as tmp: + yml = Path(tmp) / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": [inline]}, + })) + pkg = APMPackage.from_apm_yml(yml) + deps = pkg.get_mcp_dependencies() + + self.assertEqual(len(deps), 1) + self.assertIsInstance(deps[0], dict) + self.assertEqual(deps[0]["name"], "my-srv") + + def test_parse_mixed_mcp_deps(self): + """A mix of string and dict entries is preserved in order.""" + inline = {"name": "inline-srv", "type": "streamable-http", "url": "https://x"} + with tempfile.TemporaryDirectory() as tmp: + yml = Path(tmp) / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["registry-srv", inline]}, + })) + pkg = APMPackage.from_apm_yml(yml) + deps = pkg.get_mcp_dependencies() + + self.assertEqual(len(deps), 2) + self.assertIsInstance(deps[0], str) + self.assertIsInstance(deps[1], dict) + + def test_no_mcp_section(self): + """Missing MCP section returns empty list.""" + with tempfile.TemporaryDirectory() as tmp: + yml = Path(tmp) / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + })) + pkg = APMPackage.from_apm_yml(yml) + self.assertEqual(pkg.get_mcp_dependencies(), []) + + +# --------------------------------------------------------------------------- +# _collect_transitive_mcp_deps +# --------------------------------------------------------------------------- +class TestCollectTransitiveMCPDeps(unittest.TestCase): + """Tests for scanning apm_modules/ for MCP deps.""" + + def test_empty_when_dir_missing(self): + with tempfile.TemporaryDirectory() as tmp: + result = _collect_transitive_mcp_deps(Path(tmp) / "nonexistent") + self.assertEqual(result, []) + + def test_collects_string_deps(self): + with tempfile.TemporaryDirectory() as tmp: + pkg_dir = Path(tmp) / "org" / "pkg-a" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text(yaml.dump({ + "name": "pkg-a", + "version": "1.0.0", + "dependencies": {"mcp": 
["ghcr.io/a/server"]}, + })) + result = _collect_transitive_mcp_deps(Path(tmp)) + self.assertEqual(result, ["ghcr.io/a/server"]) + + def test_collects_dict_deps(self): + inline = {"name": "kb", "type": "sse", "url": "https://kb.example.com"} + with tempfile.TemporaryDirectory() as tmp: + pkg_dir = Path(tmp) / "org" / "pkg-b" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text(yaml.dump({ + "name": "pkg-b", + "version": "1.0.0", + "dependencies": {"mcp": [inline]}, + })) + result = _collect_transitive_mcp_deps(Path(tmp)) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["name"], "kb") + + def test_collects_from_multiple_packages(self): + with tempfile.TemporaryDirectory() as tmp: + for i, dep in enumerate(["ghcr.io/a/s1", "ghcr.io/b/s2"]): + d = Path(tmp) / "org" / f"pkg-{i}" + d.mkdir(parents=True) + (d / "apm.yml").write_text(yaml.dump({ + "name": f"pkg-{i}", + "version": "1.0.0", + "dependencies": {"mcp": [dep]}, + })) + result = _collect_transitive_mcp_deps(Path(tmp)) + self.assertEqual(len(result), 2) + + def test_skips_unparseable_apm_yml(self): + with tempfile.TemporaryDirectory() as tmp: + pkg_dir = Path(tmp) / "org" / "bad-pkg" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text("invalid: yaml: [") + # Should not raise + result = _collect_transitive_mcp_deps(Path(tmp)) + self.assertEqual(result, []) + + +# --------------------------------------------------------------------------- +# _deduplicate_mcp_deps +# --------------------------------------------------------------------------- +class TestDeduplicateMCPDeps(unittest.TestCase): + + def test_deduplicates_strings(self): + deps = ["a", "b", "a", "c", "b"] + self.assertEqual(_deduplicate_mcp_deps(deps), ["a", "b", "c"]) + + def test_deduplicates_dicts_by_name(self): + d1 = {"name": "srv", "type": "sse", "url": "https://one"} + d2 = {"name": "srv", "type": "sse", "url": "https://two"} # same name + d3 = {"name": "other", "type": "sse", "url": "https://three"} + result 
= _deduplicate_mcp_deps([d1, d2, d3]) + self.assertEqual(len(result), 2) + self.assertEqual(result[0]["url"], "https://one") # first wins + + def test_mixed_dedup(self): + inline = {"name": "kb", "type": "sse", "url": "https://kb"} + deps = ["a", inline, "a", {"name": "kb", "type": "sse", "url": "https://kb2"}] + result = _deduplicate_mcp_deps(deps) + self.assertEqual(len(result), 2) + self.assertIsInstance(result[0], str) + self.assertIsInstance(result[1], dict) + + def test_empty_list(self): + self.assertEqual(_deduplicate_mcp_deps([]), []) + + def test_dict_without_name_kept(self): + """Dicts without 'name' are kept if not already in result.""" + d = {"type": "sse", "url": "https://x"} + result = _deduplicate_mcp_deps([d, d]) + self.assertEqual(len(result), 1) + + +# --------------------------------------------------------------------------- +# _write_inline_mcp_vscode +# --------------------------------------------------------------------------- +class TestWriteInlineMCPVscode(unittest.TestCase): + + def test_creates_file_from_scratch(self): + with tempfile.TemporaryDirectory() as tmp: + with patch("apm_cli.cli.Path.cwd", return_value=Path(tmp)): + _write_inline_mcp_vscode("test-srv", {"type": "sse", "url": "https://x"}) + + mcp_path = Path(tmp) / ".vscode" / "mcp.json" + self.assertTrue(mcp_path.exists()) + data = json.loads(mcp_path.read_text()) + self.assertIn("test-srv", data["servers"]) + self.assertEqual(data["servers"]["test-srv"]["url"], "https://x") + + def test_merges_into_existing_file(self): + with tempfile.TemporaryDirectory() as tmp: + vscode_dir = Path(tmp) / ".vscode" + vscode_dir.mkdir() + mcp_path = vscode_dir / "mcp.json" + mcp_path.write_text(json.dumps({"servers": {"existing": {"type": "stdio"}}})) + + with patch("apm_cli.cli.Path.cwd", return_value=Path(tmp)): + _write_inline_mcp_vscode("new-srv", {"type": "sse", "url": "https://new"}) + + data = json.loads(mcp_path.read_text()) + self.assertIn("existing", data["servers"]) + 
self.assertIn("new-srv", data["servers"]) + + def test_overwrites_same_name(self): + with tempfile.TemporaryDirectory() as tmp: + vscode_dir = Path(tmp) / ".vscode" + vscode_dir.mkdir() + mcp_path = vscode_dir / "mcp.json" + mcp_path.write_text(json.dumps({"servers": {"srv": {"type": "sse", "url": "https://old"}}})) + + with patch("apm_cli.cli.Path.cwd", return_value=Path(tmp)): + _write_inline_mcp_vscode("srv", {"type": "sse", "url": "https://new"}) + + data = json.loads(mcp_path.read_text()) + self.assertEqual(data["servers"]["srv"]["url"], "https://new") + + +# --------------------------------------------------------------------------- +# _write_inline_mcp_copilot +# --------------------------------------------------------------------------- +class TestWriteInlineMCPCopilot(unittest.TestCase): + + def test_creates_file_from_scratch(self): + with tempfile.TemporaryDirectory() as tmp: + with patch("apm_cli.cli.Path.home", return_value=Path(tmp)): + _write_inline_mcp_copilot("cp-srv", {"type": "sse", "url": "https://cp"}) + + config_path = Path(tmp) / ".copilot" / "mcp-config.json" + self.assertTrue(config_path.exists()) + data = json.loads(config_path.read_text()) + self.assertIn("cp-srv", data["mcpServers"]) + + def test_merges_into_existing_file(self): + with tempfile.TemporaryDirectory() as tmp: + copilot_dir = Path(tmp) / ".copilot" + copilot_dir.mkdir() + config_path = copilot_dir / "mcp-config.json" + config_path.write_text(json.dumps({"mcpServers": {"old": {"type": "stdio"}}})) + + with patch("apm_cli.cli.Path.home", return_value=Path(tmp)): + _write_inline_mcp_copilot("new", {"type": "sse", "url": "https://new"}) + + data = json.loads(config_path.read_text()) + self.assertIn("old", data["mcpServers"]) + self.assertIn("new", data["mcpServers"]) + + +# --------------------------------------------------------------------------- +# _install_inline_mcp_deps +# --------------------------------------------------------------------------- +class 
TestInstallInlineMCPDeps(unittest.TestCase): + + @patch("apm_cli.cli._write_inline_mcp_vscode") + @patch("apm_cli.cli._write_inline_mcp_copilot") + @patch("apm_cli.cli._get_console", return_value=None) + def test_installs_for_all_runtimes(self, _console, mock_copilot, mock_vscode): + deps = [{"name": "s1", "type": "sse", "url": "https://s1"}] + count = _install_inline_mcp_deps(deps, ["vscode", "copilot"]) + + self.assertEqual(count, 1) + mock_vscode.assert_called_once() + mock_copilot.assert_called_once() + + @patch("apm_cli.cli._write_inline_mcp_vscode") + @patch("apm_cli.cli._write_inline_mcp_copilot") + @patch("apm_cli.cli._get_console", return_value=None) + def test_skips_dep_without_name(self, _console, mock_copilot, mock_vscode): + deps = [{"type": "sse", "url": "https://no-name"}] + count = _install_inline_mcp_deps(deps, ["vscode"]) + + self.assertEqual(count, 0) + mock_vscode.assert_not_called() + + @patch("apm_cli.cli._write_inline_mcp_vscode") + @patch("apm_cli.cli._write_inline_mcp_copilot") + @patch("apm_cli.cli._get_console", return_value=None) + def test_skips_dep_without_url(self, _console, mock_copilot, mock_vscode): + deps = [{"name": "srv"}] + count = _install_inline_mcp_deps(deps, ["vscode"]) + + self.assertEqual(count, 0) + mock_vscode.assert_not_called() + + @patch("apm_cli.cli._write_inline_mcp_vscode") + @patch("apm_cli.cli._get_console", return_value=None) + def test_includes_headers_when_present(self, _console, mock_vscode): + deps = [{"name": "s", "type": "sse", "url": "https://s", "headers": {"Authorization": "Bearer x"}}] + _install_inline_mcp_deps(deps, ["vscode"]) + + call_args = mock_vscode.call_args + server_config = call_args[0][1] + self.assertIn("headers", server_config) + self.assertEqual(server_config["headers"]["Authorization"], "Bearer x") + + @patch("apm_cli.cli._write_inline_mcp_vscode", side_effect=Exception("write failed")) + @patch("apm_cli.cli._get_console", return_value=None) + def test_continues_on_write_failure(self, 
_console, mock_vscode): + """Failure writing one dep should not prevent the next from being counted.""" + deps = [ + {"name": "fail", "type": "sse", "url": "https://fail"}, + {"name": "ok", "type": "sse", "url": "https://ok"}, + ] + # Both raise, so configured count is still 2 (the loop counts after the rt loop) + # Actually both will error on vscode write but the counter increments after the rt loop + count = _install_inline_mcp_deps(deps, ["vscode"]) + # Even though writes fail, configured is still incremented (errors are logged, not raised) + self.assertEqual(count, 2) + + @patch("apm_cli.cli._write_inline_mcp_copilot") + @patch("apm_cli.cli._get_console", return_value=None) + def test_codex_runtime_uses_copilot_writer(self, _console, mock_copilot): + deps = [{"name": "s", "type": "sse", "url": "https://s"}] + _install_inline_mcp_deps(deps, ["codex"]) + + mock_copilot.assert_called_once() + + +if __name__ == "__main__": + unittest.main() From 5560597698f08931c4cb5f3185c4af00ae6f32ea Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Fri, 27 Feb 2026 18:44:53 +0000 Subject: [PATCH 2/9] fix: address PR review feedback (#123) - Scope _collect_transitive_mcp_deps to apm.lock entries instead of full rglob - Gate transitive MCP on should_install_mcp + apm_modules existence - Add _KNOWN_MCP_TYPES validation with warning for unknown types - Emit warnings on JSON parse failures in vscode/copilot config writers - Fix success counting: only count when at least one runtime write succeeds - Remove unused imports in cli.py and test file - Update test_continues_on_write_failure to assert count==0 --- src/apm_cli/cli.py | 65 +++++++++++++++++++++++-------- tests/unit/test_transitive_mcp.py | 13 +++---- 2 files changed, 55 insertions(+), 23 deletions(-) diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 5260d8a85..803a1da86 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -702,8 +702,10 @@ def install(ctx, packages, runtime, exclude, only, update, 
dry_run, verbose): _rich_info("No APM dependencies found in apm.yml") # Collect transitive MCP dependencies from resolved APM packages - if should_install_mcp and should_install_apm: - transitive_mcp = _collect_transitive_mcp_deps(Path.cwd() / "apm_modules") + apm_modules_path = Path.cwd() / "apm_modules" + if should_install_mcp and apm_modules_path.exists(): + lock_path = Path.cwd() / "apm.lock" + transitive_mcp = _collect_transitive_mcp_deps(apm_modules_path, lock_path) if transitive_mcp: _rich_info(f"Collected {len(transitive_mcp)} transitive MCP dependency(ies)") mcp_deps = _deduplicate_mcp_deps(mcp_deps + transitive_mcp) @@ -2147,14 +2149,16 @@ def matches_filter(dep): raise RuntimeError(f"Failed to resolve APM dependencies: {e}") -def _collect_transitive_mcp_deps(apm_modules_dir: Path) -> list: - """Collect MCP dependencies from all resolved APM packages in apm_modules/. +def _collect_transitive_mcp_deps(apm_modules_dir: Path, lock_path: Path = None) -> list: + """Collect MCP dependencies from resolved APM packages listed in apm.lock. - Walks every apm.yml inside apm_modules/ and returns a flat list of their - MCP entries (strings or inline dicts). + Only scans apm.yml files for packages present in apm.lock to avoid + picking up stale/orphaned packages from previous installs. + Falls back to scanning all apm.yml files if no lock file is available. Args: apm_modules_dir: Path to the apm_modules directory. + lock_path: Path to the apm.lock file (optional). Returns: List of MCP dependency entries (str or dict). 
@@ -2164,8 +2168,30 @@ def _collect_transitive_mcp_deps(apm_modules_dir: Path) -> list: from apm_cli.models.apm_package import APMPackage + # Build set of expected apm.yml paths from apm.lock + locked_paths = None + if lock_path and lock_path.exists(): + try: + import yaml + + with open(lock_path, "r", encoding="utf-8") as f: + lock_data = yaml.safe_load(f) or {} + locked_paths = builtins.set() + for dep in lock_data.get("dependencies", []): + host = dep.get("host", "github.com") + repo_url = dep.get("repo_url", "") + virtual_path = dep.get("virtual_path", "") + if repo_url: + yml = apm_modules_dir / host / repo_url / virtual_path / "apm.yml" if virtual_path else apm_modules_dir / host / repo_url / "apm.yml" + locked_paths.add(yml.resolve()) + except Exception: + locked_paths = None # Fall back to full scan + collected = [] for apm_yml_path in apm_modules_dir.rglob("apm.yml"): + # Skip packages not in the lock file + if locked_paths is not None and apm_yml_path.resolve() not in locked_paths: + continue try: pkg = APMPackage.from_apm_yml(apm_yml_path) mcp = pkg.get_mcp_dependencies() @@ -2473,10 +2499,9 @@ def _install_inline_mcp_deps( verbose: Show detailed output. Returns: - int: Number of servers configured. + int: Number of servers successfully configured. 
""" - import json - from pathlib import Path + _KNOWN_MCP_TYPES = {"sse", "http"} console = _get_console() configured = 0 @@ -2491,11 +2516,15 @@ def _install_inline_mcp_deps( _rich_warning(f"Skipping inline MCP dep with missing name or url: {dep}") continue + if server_type not in _KNOWN_MCP_TYPES: + _rich_warning(f"MCP server '{name}' has unknown type '{server_type}' (expected one of {_KNOWN_MCP_TYPES})") + # Build the server config entry server_config = {"type": server_type, "url": url} if headers: server_config["headers"] = headers + any_success = False for rt in target_runtimes: try: if rt == "vscode": @@ -2508,15 +2537,17 @@ def _install_inline_mcp_deps( if verbose: _rich_warning(f"Unsupported runtime '{rt}' for inline MCP config") continue + any_success = True except Exception as e: _rich_error(f"Failed to configure inline MCP '{name}' for {rt}: {e}") continue - if console: - console.print( - f"│ [green]✓[/green] {name} (inline) → {', '.join([rt.title() for rt in target_runtimes])}" - ) - configured += 1 + if any_success: + if console: + console.print( + f"│ [green]✓[/green] {name} (inline) → {', '.join([rt.title() for rt in target_runtimes])}" + ) + configured += 1 return configured @@ -2535,7 +2566,8 @@ def _write_inline_mcp_vscode(name: str, server_config: dict, verbose: bool = Fal try: with open(mcp_path, "r", encoding="utf-8") as f: config = json.load(f) - except (json.JSONDecodeError, IOError): + except (json.JSONDecodeError, IOError) as e: + _rich_warning(f"Could not parse {mcp_path}, resetting: {e}") config = {} if "servers" not in config: @@ -2565,7 +2597,8 @@ def _write_inline_mcp_copilot(name: str, server_config: dict, verbose: bool = Fa try: with open(config_path, "r", encoding="utf-8") as f: config = json.load(f) - except (json.JSONDecodeError, IOError): + except (json.JSONDecodeError, IOError) as e: + _rich_warning(f"Could not parse {config_path}, resetting: {e}") config = {} if "mcpServers" not in config: diff --git 
a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index 154be3049..5a7dbd237 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -1,11 +1,10 @@ """Tests for transitive MCP dependency collection, deduplication, and inline installation.""" import json -import os import tempfile import unittest from pathlib import Path -from unittest.mock import patch, MagicMock +from unittest.mock import patch import yaml @@ -304,16 +303,16 @@ def test_includes_headers_when_present(self, _console, mock_vscode): @patch("apm_cli.cli._write_inline_mcp_vscode", side_effect=Exception("write failed")) @patch("apm_cli.cli._get_console", return_value=None) def test_continues_on_write_failure(self, _console, mock_vscode): - """Failure writing one dep should not prevent the next from being counted.""" + """Failure writing one dep should not prevent the next from being attempted.""" deps = [ {"name": "fail", "type": "sse", "url": "https://fail"}, {"name": "ok", "type": "sse", "url": "https://ok"}, ] - # Both raise, so configured count is still 2 (the loop counts after the rt loop) - # Actually both will error on vscode write but the counter increments after the rt loop + # Both raise, so no deps are successfully configured count = _install_inline_mcp_deps(deps, ["vscode"]) - # Even though writes fail, configured is still incremented (errors are logged, not raised) - self.assertEqual(count, 2) + self.assertEqual(count, 0) + # But both were attempted + self.assertEqual(mock_vscode.call_count, 2) @patch("apm_cli.cli._write_inline_mcp_copilot") @patch("apm_cli.cli._get_console", return_value=None) From f334098cae1d7c37dae63bfccf7bf742ed0e9735 Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Fri, 27 Feb 2026 19:07:28 +0000 Subject: [PATCH 3/9] fix: replace streamable-http with http in docs and tests The correct MCP transport types are 'sse' and 'http'. 
Updated docs and test fixtures that incorrectly used 'streamable-http' to align with _KNOWN_MCP_TYPES validation. --- docs/dependencies.md | 4 ++-- docs/integrations.md | 2 +- tests/unit/test_transitive_mcp.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/dependencies.md b/docs/dependencies.md index 816a735bc..1c9e0ebb0 100644 --- a/docs/dependencies.md +++ b/docs/dependencies.md @@ -77,7 +77,7 @@ dependencies: mcp: - io.github.github/github-mcp-server # Registry reference - name: my-knowledge-base # Inline config - type: streamable-http + type: http url: https://my-kb.example.com/ ``` @@ -86,7 +86,7 @@ MCP dependencies support two formats: - **Registry strings** — resolve via the MCP server registry (e.g. `io.github.github/github-mcp-server`). - **Inline dicts** — written directly to runtime configs, bypassing the registry. Useful for private or self-hosted servers. -Inline entries require `name`, `type` (`sse` or `streamable-http`), and `url`. An optional `headers` map is also supported. +Inline entries require `name`, `type` (`sse` or `http`), and `url`. An optional `headers` map is also supported. MCP dependencies declared by transitive APM packages are collected automatically during `apm install`. 
diff --git a/docs/integrations.md b/docs/integrations.md index d243195c7..64e5b10cd 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -489,7 +489,7 @@ dependencies: - ghcr.io/modelcontextprotocol/postgres-server # Inline config (private / self-hosted servers) - name: my-knowledge-base - type: streamable-http + type: http url: https://my-kb.example.com/ ``` diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index 5a7dbd237..d8f5c0721 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -57,7 +57,7 @@ def test_parse_dict_mcp_deps(self): def test_parse_mixed_mcp_deps(self): """A mix of string and dict entries is preserved in order.""" - inline = {"name": "inline-srv", "type": "streamable-http", "url": "https://x"} + inline = {"name": "inline-srv", "type": "http", "url": "https://x"} with tempfile.TemporaryDirectory() as tmp: yml = Path(tmp) / "apm.yml" yml.write_text(yaml.dump({ From e9c64e9792944036fdc9d71a0a3dbe5ea02c0660 Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Fri, 27 Feb 2026 19:52:01 +0000 Subject: [PATCH 4/9] fix: remove host prefix from lock-file path resolution in transitive MCP collection _collect_transitive_mcp_deps() included the lock-file 'host' field (e.g. github.com) in the apm_modules path, but packages are stored without the host segment. All locked paths resolved to non-existent directories, so zero transitive MCP deps were collected and .vscode/mcp.json was never generated. 
- Remove host from path construction to match get_install_path() behavior - Add tests for lock-file-scoped collection (regular and virtual_path) --- src/apm_cli/cli.py | 3 +- tests/unit/test_transitive_mcp.py | 62 +++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 803a1da86..a801f99ff 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -2178,11 +2178,10 @@ def _collect_transitive_mcp_deps(apm_modules_dir: Path, lock_path: Path = None) lock_data = yaml.safe_load(f) or {} locked_paths = builtins.set() for dep in lock_data.get("dependencies", []): - host = dep.get("host", "github.com") repo_url = dep.get("repo_url", "") virtual_path = dep.get("virtual_path", "") if repo_url: - yml = apm_modules_dir / host / repo_url / virtual_path / "apm.yml" if virtual_path else apm_modules_dir / host / repo_url / "apm.yml" + yml = apm_modules_dir / repo_url / virtual_path / "apm.yml" if virtual_path else apm_modules_dir / repo_url / "apm.yml" locked_paths.add(yml.resolve()) except Exception: locked_paths = None # Fall back to full scan diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index d8f5c0721..7df52704f 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -143,6 +143,68 @@ def test_skips_unparseable_apm_yml(self): result = _collect_transitive_mcp_deps(Path(tmp)) self.assertEqual(result, []) + def test_lockfile_scopes_collection_to_locked_packages(self): + """Lock-file filtering should only collect MCP deps from locked packages.""" + with tempfile.TemporaryDirectory() as tmp: + apm_modules = Path(tmp) / "apm_modules" + # Package that IS in the lock file + locked_dir = apm_modules / "org" / "locked-pkg" + locked_dir.mkdir(parents=True) + (locked_dir / "apm.yml").write_text(yaml.dump({ + "name": "locked-pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/locked/server"]}, + })) + # Package that is NOT 
in the lock file (orphan) + orphan_dir = apm_modules / "org" / "orphan-pkg" + orphan_dir.mkdir(parents=True) + (orphan_dir / "apm.yml").write_text(yaml.dump({ + "name": "orphan-pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/orphan/server"]}, + })) + # Write lock file referencing only the locked package + lock_path = Path(tmp) / "apm.lock" + lock_path.write_text(yaml.dump({ + "lockfile_version": "1", + "dependencies": [ + {"repo_url": "org/locked-pkg", "host": "github.com"}, + ], + })) + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + self.assertEqual(result, ["ghcr.io/locked/server"]) + + def test_lockfile_with_virtual_path(self): + """Lock-file filtering works for subdirectory (virtual_path) packages.""" + with tempfile.TemporaryDirectory() as tmp: + apm_modules = Path(tmp) / "apm_modules" + # Subdirectory package matching lock entry + sub_dir = apm_modules / "org" / "monorepo" / "skills" / "azure" + sub_dir.mkdir(parents=True) + (sub_dir / "apm.yml").write_text(yaml.dump({ + "name": "azure-skill", + "version": "1.0.0", + "dependencies": {"mcp": [{"name": "learn", "type": "http", "url": "https://learn.example.com"}]}, + })) + # Another subdirectory NOT in the lock + other_dir = apm_modules / "org" / "monorepo" / "skills" / "other" + other_dir.mkdir(parents=True) + (other_dir / "apm.yml").write_text(yaml.dump({ + "name": "other-skill", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/other/server"]}, + })) + lock_path = Path(tmp) / "apm.lock" + lock_path.write_text(yaml.dump({ + "lockfile_version": "1", + "dependencies": [ + {"repo_url": "org/monorepo", "host": "github.com", "virtual_path": "skills/azure"}, + ], + })) + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["name"], "learn") + # --------------------------------------------------------------------------- # _deduplicate_mcp_deps From 7f80e8940036e740e4ab98a3d754e51be074ceb6 Mon Sep 17 
00:00:00 2001 From: sergio-sisternes-epam Date: Mon, 2 Mar 2026 16:00:20 +0000 Subject: [PATCH 5/9] fix: address copilot feedback for transitive MCP install --- src/apm_cli/cli.py | 44 +++++++-- tests/unit/test_transitive_mcp.py | 152 +++++++++++++++++++++++++++++- 2 files changed, 186 insertions(+), 10 deletions(-) diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index a801f99ff..405d2f07f 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -2186,11 +2186,15 @@ def _collect_transitive_mcp_deps(apm_modules_dir: Path, lock_path: Path = None) except Exception: locked_paths = None # Fall back to full scan + # Prefer iterating lock-derived paths directly (existing files only). + # Fall back to full scan only when lock parsing is unavailable. + if locked_paths is not None: + apm_yml_paths = [path for path in sorted(locked_paths) if path.exists()] + else: + apm_yml_paths = apm_modules_dir.rglob("apm.yml") + collected = [] - for apm_yml_path in apm_modules_dir.rglob("apm.yml"): - # Skip packages not in the lock file - if locked_paths is not None and apm_yml_path.resolve() not in locked_paths: - continue + for apm_yml_path in apm_yml_paths: try: pkg = APMPackage.from_apm_yml(apm_yml_path) mcp = pkg.get_mcp_dependencies() @@ -2249,7 +2253,7 @@ def _install_mcp_dependencies( verbose: Show detailed installation information Returns: - int: Number of MCP servers configured + int: Number of MCP servers newly configured """ if not mcp_deps: _rich_warning("No MCP dependencies found in apm.yml") @@ -2412,18 +2416,33 @@ def _install_mcp_dependencies( servers_to_install = operations.check_servers_needing_installation( target_runtimes, valid_servers ) + already_configured_servers = [ + dep for dep in valid_servers if dep not in servers_to_install + ] if not servers_to_install: # All already configured if console: - for dep in registry_deps: + for dep in already_configured_servers: console.print( f"│ [green]✓[/green] {dep} [dim](already configured)[/dim]" ) else: 
_rich_success("All registry MCP servers already configured") - configured_count += len(registry_deps) else: + # Surface already-configured servers distinctly from newly configured ones + if already_configured_servers: + if console: + for dep in already_configured_servers: + console.print( + f"│ [green]✓[/green] {dep} [dim](already configured)[/dim]" + ) + elif verbose: + _rich_info( + "Already configured registry MCP servers: " + f"{', '.join(already_configured_servers)}" + ) + # Batch fetch server info once to avoid duplicate registry calls if verbose: _rich_info(f"Installing {len(servers_to_install)} servers...") @@ -2512,7 +2531,16 @@ def _install_inline_mcp_deps( headers = dep.get("headers", {}) if not name or not url: - _rich_warning(f"Skipping inline MCP dep with missing name or url: {dep}") + safe_dep = { + "name": name or None, + "type": server_type or None, + "url_present": bool(url), + "has_headers": bool(headers), + } + _rich_warning( + "Skipping inline MCP dep with missing required fields " + f"(safe fields: {safe_dep})" + ) continue if server_type not in _KNOWN_MCP_TYPES: diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index 7df52704f..cb558a7f0 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -12,6 +12,7 @@ from apm_cli.cli import ( _collect_transitive_mcp_deps, _deduplicate_mcp_deps, + _install_mcp_dependencies, _install_inline_mcp_deps, _write_inline_mcp_vscode, _write_inline_mcp_copilot, @@ -205,6 +206,49 @@ def test_lockfile_with_virtual_path(self): self.assertEqual(len(result), 1) self.assertEqual(result[0]["name"], "learn") + def test_lockfile_paths_do_not_use_full_rglob_scan(self): + """When lock-derived paths are available, avoid full recursive scanning.""" + with tempfile.TemporaryDirectory() as tmp: + apm_modules = Path(tmp) / "apm_modules" + locked_dir = apm_modules / "org" / "locked-pkg" + locked_dir.mkdir(parents=True) + (locked_dir / 
"apm.yml").write_text(yaml.dump({ + "name": "locked-pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/locked/server"]}, + })) + + lock_path = Path(tmp) / "apm.lock" + lock_path.write_text(yaml.dump({ + "lockfile_version": "1", + "dependencies": [ + {"repo_url": "org/locked-pkg", "host": "github.com"}, + ], + })) + + with patch("pathlib.Path.rglob", side_effect=AssertionError("rglob should not be called")): + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + + self.assertEqual(result, ["ghcr.io/locked/server"]) + + def test_invalid_lockfile_falls_back_to_rglob_scan(self): + """If lock parsing fails, function falls back to scanning all apm.yml files.""" + with tempfile.TemporaryDirectory() as tmp: + apm_modules = Path(tmp) / "apm_modules" + pkg_dir = apm_modules / "org" / "pkg-a" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text(yaml.dump({ + "name": "pkg-a", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/a/server"]}, + })) + + lock_path = Path(tmp) / "apm.lock" + lock_path.write_text("dependencies: [") + + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + self.assertEqual(result, ["ghcr.io/a/server"]) + # --------------------------------------------------------------------------- # _deduplicate_mcp_deps @@ -333,23 +377,56 @@ def test_installs_for_all_runtimes(self, _console, mock_copilot, mock_vscode): @patch("apm_cli.cli._write_inline_mcp_vscode") @patch("apm_cli.cli._write_inline_mcp_copilot") + @patch("apm_cli.cli._rich_warning") @patch("apm_cli.cli._get_console", return_value=None) - def test_skips_dep_without_name(self, _console, mock_copilot, mock_vscode): + def test_skips_dep_without_name( + self, _console, mock_warning, mock_copilot, mock_vscode + ): deps = [{"type": "sse", "url": "https://no-name"}] count = _install_inline_mcp_deps(deps, ["vscode"]) self.assertEqual(count, 0) mock_vscode.assert_not_called() + warning_msg = mock_warning.call_args[0][0] + self.assertIn("safe fields", 
warning_msg) + self.assertIn("url_present", warning_msg) + self.assertNotIn("https://no-name", warning_msg) + + @patch("apm_cli.cli._write_inline_mcp_vscode") + @patch("apm_cli.cli._rich_warning") + @patch("apm_cli.cli._get_console", return_value=None) + def test_missing_required_fields_warning_does_not_expose_header_values( + self, _console, mock_warning, mock_vscode + ): + deps = [ + { + "type": "sse", + "headers": {"Authorization": "Bearer secret-token"}, + } + ] + + count = _install_inline_mcp_deps(deps, ["vscode"]) + + self.assertEqual(count, 0) + mock_vscode.assert_not_called() + warning_msg = mock_warning.call_args[0][0] + self.assertIn("has_headers", warning_msg) + self.assertNotIn("secret-token", warning_msg) + self.assertNotIn("Authorization", warning_msg) @patch("apm_cli.cli._write_inline_mcp_vscode") @patch("apm_cli.cli._write_inline_mcp_copilot") + @patch("apm_cli.cli._rich_warning") @patch("apm_cli.cli._get_console", return_value=None) - def test_skips_dep_without_url(self, _console, mock_copilot, mock_vscode): + def test_skips_dep_without_url(self, _console, mock_warning, mock_copilot, mock_vscode): deps = [{"name": "srv"}] count = _install_inline_mcp_deps(deps, ["vscode"]) self.assertEqual(count, 0) mock_vscode.assert_not_called() + warning_msg = mock_warning.call_args[0][0] + self.assertIn("safe fields", warning_msg) + self.assertIn("has_headers", warning_msg) @patch("apm_cli.cli._write_inline_mcp_vscode") @patch("apm_cli.cli._get_console", return_value=None) @@ -385,5 +462,76 @@ def test_codex_runtime_uses_copilot_writer(self, _console, mock_copilot): mock_copilot.assert_called_once() +# --------------------------------------------------------------------------- +# _install_mcp_dependencies +# --------------------------------------------------------------------------- +class TestInstallMCPDependencies(unittest.TestCase): + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.registry.operations.MCPServerOperations") + def 
test_already_configured_registry_servers_not_counted_as_new( + self, mock_ops_cls, _console + ): + mock_ops = mock_ops_cls.return_value + mock_ops.validate_servers_exist.return_value = (["ghcr.io/org/server"], []) + mock_ops.check_servers_needing_installation.return_value = [] + + count = _install_mcp_dependencies(["ghcr.io/org/server"], runtime="vscode") + + self.assertEqual(count, 0) + + @patch("apm_cli.cli._install_for_runtime") + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.registry.operations.MCPServerOperations") + def test_counts_only_newly_configured_registry_servers( + self, mock_ops_cls, _console, mock_install_runtime + ): + mock_ops = mock_ops_cls.return_value + mock_ops.validate_servers_exist.return_value = ( + ["ghcr.io/org/already", "ghcr.io/org/new"], + [], + ) + mock_ops.check_servers_needing_installation.return_value = ["ghcr.io/org/new"] + mock_ops.batch_fetch_server_info.return_value = {"ghcr.io/org/new": {}} + mock_ops.collect_environment_variables.return_value = {} + mock_ops.collect_runtime_variables.return_value = {} + + count = _install_mcp_dependencies( + ["ghcr.io/org/already", "ghcr.io/org/new"], runtime="vscode" + ) + + self.assertEqual(count, 1) + mock_install_runtime.assert_called_once() + + @patch("apm_cli.cli._install_for_runtime") + @patch("apm_cli.registry.operations.MCPServerOperations") + def test_mixed_registry_servers_show_already_configured_and_count_only_new( + self, mock_ops_cls, mock_install_runtime + ): + mock_console = unittest.mock.MagicMock() + mock_ops = mock_ops_cls.return_value + mock_ops.validate_servers_exist.return_value = ( + ["ghcr.io/org/already", "ghcr.io/org/new"], + [], + ) + mock_ops.check_servers_needing_installation.return_value = ["ghcr.io/org/new"] + mock_ops.batch_fetch_server_info.return_value = {"ghcr.io/org/new": {}} + mock_ops.collect_environment_variables.return_value = {} + mock_ops.collect_runtime_variables.return_value = {} + + with patch("apm_cli.cli._get_console", 
return_value=mock_console): + count = _install_mcp_dependencies( + ["ghcr.io/org/already", "ghcr.io/org/new"], runtime="vscode" + ) + + self.assertEqual(count, 1) + mock_install_runtime.assert_called_once() + printed_lines = "\n".join( + str(call.args[0]) for call in mock_console.print.call_args_list if call.args + ) + self.assertIn("ghcr.io/org/already", printed_lines) + self.assertIn("already configured", printed_lines) + + if __name__ == "__main__": unittest.main() From 6dd7babe0a957aae276e41afa251effce605574b Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Mon, 2 Mar 2026 22:20:22 +0000 Subject: [PATCH 6/9] fix: address PR #123 review feedback from danielmeppiel - Use LockFile.read() instead of raw yaml.safe_load() in _collect_transitive_mcp_deps (#1) - Guard against mcp:null in get_mcp_dependencies() (#2) - Remove inline MCP installation pipeline, defer to follow-up PR (#3/#7) - Remove redundant import builtins in _deduplicate_mcp_deps (#10) - Add tests for mcp:null, mcp:[], root-over-transitive dedup order (#9) - Remove tests for deleted inline pipeline functions --- src/apm_cli/cli.py | 162 ++--------------------- src/apm_cli/models/apm_package.py | 2 +- tests/unit/test_transitive_mcp.py | 213 +++++------------------------- 3 files changed, 42 insertions(+), 335 deletions(-) diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 405d2f07f..939b02e5d 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -2171,20 +2171,13 @@ def _collect_transitive_mcp_deps(apm_modules_dir: Path, lock_path: Path = None) # Build set of expected apm.yml paths from apm.lock locked_paths = None if lock_path and lock_path.exists(): - try: - import yaml - - with open(lock_path, "r", encoding="utf-8") as f: - lock_data = yaml.safe_load(f) or {} + lockfile = LockFile.read(lock_path) + if lockfile is not None: locked_paths = builtins.set() - for dep in lock_data.get("dependencies", []): - repo_url = dep.get("repo_url", "") - virtual_path = dep.get("virtual_path", 
"") - if repo_url: - yml = apm_modules_dir / repo_url / virtual_path / "apm.yml" if virtual_path else apm_modules_dir / repo_url / "apm.yml" + for dep in lockfile.get_all_dependencies(): + if dep.repo_url: + yml = apm_modules_dir / dep.repo_url / dep.virtual_path / "apm.yml" if dep.virtual_path else apm_modules_dir / dep.repo_url / "apm.yml" locked_paths.add(yml.resolve()) - except Exception: - locked_paths = None # Fall back to full scan # Prefer iterating lock-derived paths directly (existing files only). # Fall back to full scan only when lock parsing is unavailable. @@ -2217,8 +2210,6 @@ def _deduplicate_mcp_deps(deps: list) -> list: Returns: Deduplicated list preserving insertion order. """ - import builtins - seen_strings: builtins.set = builtins.set() seen_names: builtins.set = builtins.set() result = [] @@ -2488,8 +2479,9 @@ def _install_mcp_dependencies( # --- Inline dict deps (name/type/url) --- if inline_deps: - inline_count = _install_inline_mcp_deps(inline_deps, target_runtimes, verbose) - configured_count += inline_count + _rich_warning( + f"Skipping {len(inline_deps)} inline MCP dep(s) — inline dict installation deferred to follow-up PR" + ) # Close the panel if console: @@ -2503,144 +2495,6 @@ def _install_mcp_dependencies( return configured_count -def _install_inline_mcp_deps( - inline_deps: list, target_runtimes: list, verbose: bool = False -) -> int: - """Install inline MCP dependencies (dict configs) directly into runtime configs. - - Inline deps look like: {"name": "my-server", "type": "sse", "url": "https://..."} - They bypass the MCP registry and are written directly. - - Args: - inline_deps: List of dict MCP entries. - target_runtimes: List of target runtime names. - verbose: Show detailed output. - - Returns: - int: Number of servers successfully configured. 
- """ - _KNOWN_MCP_TYPES = {"sse", "http"} - - console = _get_console() - configured = 0 - - for dep in inline_deps: - name = dep.get("name", "") - server_type = dep.get("type", "sse") - url = dep.get("url", "") - headers = dep.get("headers", {}) - - if not name or not url: - safe_dep = { - "name": name or None, - "type": server_type or None, - "url_present": bool(url), - "has_headers": bool(headers), - } - _rich_warning( - "Skipping inline MCP dep with missing required fields " - f"(safe fields: {safe_dep})" - ) - continue - - if server_type not in _KNOWN_MCP_TYPES: - _rich_warning(f"MCP server '{name}' has unknown type '{server_type}' (expected one of {_KNOWN_MCP_TYPES})") - - # Build the server config entry - server_config = {"type": server_type, "url": url} - if headers: - server_config["headers"] = headers - - any_success = False - for rt in target_runtimes: - try: - if rt == "vscode": - _write_inline_mcp_vscode(name, server_config, verbose) - elif rt == "copilot": - _write_inline_mcp_copilot(name, server_config, verbose) - elif rt == "codex": - _write_inline_mcp_copilot(name, server_config, verbose) - else: - if verbose: - _rich_warning(f"Unsupported runtime '{rt}' for inline MCP config") - continue - any_success = True - except Exception as e: - _rich_error(f"Failed to configure inline MCP '{name}' for {rt}: {e}") - continue - - if any_success: - if console: - console.print( - f"│ [green]✓[/green] {name} (inline) → {', '.join([rt.title() for rt in target_runtimes])}" - ) - configured += 1 - - return configured - - -def _write_inline_mcp_vscode(name: str, server_config: dict, verbose: bool = False): - """Write an inline MCP server config to .vscode/mcp.json.""" - import json - from pathlib import Path - - vscode_dir = Path.cwd() / ".vscode" - vscode_dir.mkdir(parents=True, exist_ok=True) - mcp_path = vscode_dir / "mcp.json" - - config = {} - if mcp_path.exists(): - try: - with open(mcp_path, "r", encoding="utf-8") as f: - config = json.load(f) - except 
(json.JSONDecodeError, IOError) as e: - _rich_warning(f"Could not parse {mcp_path}, resetting: {e}") - config = {} - - if "servers" not in config: - config["servers"] = {} - - if name in config["servers"]: - if verbose: - _rich_info(f" {name} already in .vscode/mcp.json, updating") - - config["servers"][name] = server_config - - with open(mcp_path, "w", encoding="utf-8") as f: - json.dump(config, f, indent=2) - - -def _write_inline_mcp_copilot(name: str, server_config: dict, verbose: bool = False): - """Write an inline MCP server config to ~/.copilot/mcp-config.json.""" - import json - from pathlib import Path - - copilot_dir = Path.home() / ".copilot" - copilot_dir.mkdir(parents=True, exist_ok=True) - config_path = copilot_dir / "mcp-config.json" - - config = {} - if config_path.exists(): - try: - with open(config_path, "r", encoding="utf-8") as f: - config = json.load(f) - except (json.JSONDecodeError, IOError) as e: - _rich_warning(f"Could not parse {config_path}, resetting: {e}") - config = {} - - if "mcpServers" not in config: - config["mcpServers"] = {} - - if name in config["mcpServers"]: - if verbose: - _rich_info(f" {name} already in copilot config, updating") - - config["mcpServers"][name] = server_config - - with open(config_path, "w", encoding="utf-8") as f: - json.dump(config, f, indent=2) - - def _show_install_summary( apm_count: int, prompt_count: int, agent_count: int, mcp_count: int, apm_config ): diff --git a/src/apm_cli/models/apm_package.py b/src/apm_cli/models/apm_package.py index 86c3b7945..5e1f7a331 100644 --- a/src/apm_cli/models/apm_package.py +++ b/src/apm_cli/models/apm_package.py @@ -801,7 +801,7 @@ def get_mcp_dependencies(self) -> List[Union[str, dict]]: """Get list of MCP dependencies (strings for registry, dicts for inline configs).""" if not self.dependencies or 'mcp' not in self.dependencies: return [] - return [dep for dep in self.dependencies.get('mcp', []) + return [dep for dep in (self.dependencies.get('mcp') or []) if 
isinstance(dep, (str, dict))] def has_apm_dependencies(self) -> bool: diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index cb558a7f0..72e5d900d 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -1,6 +1,5 @@ """Tests for transitive MCP dependency collection, deduplication, and inline installation.""" -import json import tempfile import unittest from pathlib import Path @@ -13,9 +12,6 @@ _collect_transitive_mcp_deps, _deduplicate_mcp_deps, _install_mcp_dependencies, - _install_inline_mcp_deps, - _write_inline_mcp_vscode, - _write_inline_mcp_copilot, ) @@ -84,6 +80,30 @@ def test_no_mcp_section(self): pkg = APMPackage.from_apm_yml(yml) self.assertEqual(pkg.get_mcp_dependencies(), []) + def test_mcp_null_returns_empty(self): + """mcp: null should return empty list, not raise TypeError.""" + with tempfile.TemporaryDirectory() as tmp: + yml = Path(tmp) / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": None}, + })) + pkg = APMPackage.from_apm_yml(yml) + self.assertEqual(pkg.get_mcp_dependencies(), []) + + def test_mcp_empty_list_returns_empty(self): + """mcp: [] should return empty list.""" + with tempfile.TemporaryDirectory() as tmp: + yml = Path(tmp) / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": []}, + })) + pkg = APMPackage.from_apm_yml(yml) + self.assertEqual(pkg.get_mcp_dependencies(), []) + # --------------------------------------------------------------------------- # _collect_transitive_mcp_deps @@ -284,182 +304,15 @@ def test_dict_without_name_kept(self): result = _deduplicate_mcp_deps([d, d]) self.assertEqual(len(result), 1) - -# --------------------------------------------------------------------------- -# _write_inline_mcp_vscode -# --------------------------------------------------------------------------- -class TestWriteInlineMCPVscode(unittest.TestCase): - - def 
test_creates_file_from_scratch(self): - with tempfile.TemporaryDirectory() as tmp: - with patch("apm_cli.cli.Path.cwd", return_value=Path(tmp)): - _write_inline_mcp_vscode("test-srv", {"type": "sse", "url": "https://x"}) - - mcp_path = Path(tmp) / ".vscode" / "mcp.json" - self.assertTrue(mcp_path.exists()) - data = json.loads(mcp_path.read_text()) - self.assertIn("test-srv", data["servers"]) - self.assertEqual(data["servers"]["test-srv"]["url"], "https://x") - - def test_merges_into_existing_file(self): - with tempfile.TemporaryDirectory() as tmp: - vscode_dir = Path(tmp) / ".vscode" - vscode_dir.mkdir() - mcp_path = vscode_dir / "mcp.json" - mcp_path.write_text(json.dumps({"servers": {"existing": {"type": "stdio"}}})) - - with patch("apm_cli.cli.Path.cwd", return_value=Path(tmp)): - _write_inline_mcp_vscode("new-srv", {"type": "sse", "url": "https://new"}) - - data = json.loads(mcp_path.read_text()) - self.assertIn("existing", data["servers"]) - self.assertIn("new-srv", data["servers"]) - - def test_overwrites_same_name(self): - with tempfile.TemporaryDirectory() as tmp: - vscode_dir = Path(tmp) / ".vscode" - vscode_dir.mkdir() - mcp_path = vscode_dir / "mcp.json" - mcp_path.write_text(json.dumps({"servers": {"srv": {"type": "sse", "url": "https://old"}}})) - - with patch("apm_cli.cli.Path.cwd", return_value=Path(tmp)): - _write_inline_mcp_vscode("srv", {"type": "sse", "url": "https://new"}) - - data = json.loads(mcp_path.read_text()) - self.assertEqual(data["servers"]["srv"]["url"], "https://new") - - -# --------------------------------------------------------------------------- -# _write_inline_mcp_copilot -# --------------------------------------------------------------------------- -class TestWriteInlineMCPCopilot(unittest.TestCase): - - def test_creates_file_from_scratch(self): - with tempfile.TemporaryDirectory() as tmp: - with patch("apm_cli.cli.Path.home", return_value=Path(tmp)): - _write_inline_mcp_copilot("cp-srv", {"type": "sse", "url": "https://cp"}) 
- - config_path = Path(tmp) / ".copilot" / "mcp-config.json" - self.assertTrue(config_path.exists()) - data = json.loads(config_path.read_text()) - self.assertIn("cp-srv", data["mcpServers"]) - - def test_merges_into_existing_file(self): - with tempfile.TemporaryDirectory() as tmp: - copilot_dir = Path(tmp) / ".copilot" - copilot_dir.mkdir() - config_path = copilot_dir / "mcp-config.json" - config_path.write_text(json.dumps({"mcpServers": {"old": {"type": "stdio"}}})) - - with patch("apm_cli.cli.Path.home", return_value=Path(tmp)): - _write_inline_mcp_copilot("new", {"type": "sse", "url": "https://new"}) - - data = json.loads(config_path.read_text()) - self.assertIn("old", data["mcpServers"]) - self.assertIn("new", data["mcpServers"]) - - -# --------------------------------------------------------------------------- -# _install_inline_mcp_deps -# --------------------------------------------------------------------------- -class TestInstallInlineMCPDeps(unittest.TestCase): - - @patch("apm_cli.cli._write_inline_mcp_vscode") - @patch("apm_cli.cli._write_inline_mcp_copilot") - @patch("apm_cli.cli._get_console", return_value=None) - def test_installs_for_all_runtimes(self, _console, mock_copilot, mock_vscode): - deps = [{"name": "s1", "type": "sse", "url": "https://s1"}] - count = _install_inline_mcp_deps(deps, ["vscode", "copilot"]) - - self.assertEqual(count, 1) - mock_vscode.assert_called_once() - mock_copilot.assert_called_once() - - @patch("apm_cli.cli._write_inline_mcp_vscode") - @patch("apm_cli.cli._write_inline_mcp_copilot") - @patch("apm_cli.cli._rich_warning") - @patch("apm_cli.cli._get_console", return_value=None) - def test_skips_dep_without_name( - self, _console, mock_warning, mock_copilot, mock_vscode - ): - deps = [{"type": "sse", "url": "https://no-name"}] - count = _install_inline_mcp_deps(deps, ["vscode"]) - - self.assertEqual(count, 0) - mock_vscode.assert_not_called() - warning_msg = mock_warning.call_args[0][0] - self.assertIn("safe fields", 
warning_msg) - self.assertIn("url_present", warning_msg) - self.assertNotIn("https://no-name", warning_msg) - - @patch("apm_cli.cli._write_inline_mcp_vscode") - @patch("apm_cli.cli._rich_warning") - @patch("apm_cli.cli._get_console", return_value=None) - def test_missing_required_fields_warning_does_not_expose_header_values( - self, _console, mock_warning, mock_vscode - ): - deps = [ - { - "type": "sse", - "headers": {"Authorization": "Bearer secret-token"}, - } - ] - - count = _install_inline_mcp_deps(deps, ["vscode"]) - - self.assertEqual(count, 0) - mock_vscode.assert_not_called() - warning_msg = mock_warning.call_args[0][0] - self.assertIn("has_headers", warning_msg) - self.assertNotIn("secret-token", warning_msg) - self.assertNotIn("Authorization", warning_msg) - - @patch("apm_cli.cli._write_inline_mcp_vscode") - @patch("apm_cli.cli._write_inline_mcp_copilot") - @patch("apm_cli.cli._rich_warning") - @patch("apm_cli.cli._get_console", return_value=None) - def test_skips_dep_without_url(self, _console, mock_warning, mock_copilot, mock_vscode): - deps = [{"name": "srv"}] - count = _install_inline_mcp_deps(deps, ["vscode"]) - - self.assertEqual(count, 0) - mock_vscode.assert_not_called() - warning_msg = mock_warning.call_args[0][0] - self.assertIn("safe fields", warning_msg) - self.assertIn("has_headers", warning_msg) - - @patch("apm_cli.cli._write_inline_mcp_vscode") - @patch("apm_cli.cli._get_console", return_value=None) - def test_includes_headers_when_present(self, _console, mock_vscode): - deps = [{"name": "s", "type": "sse", "url": "https://s", "headers": {"Authorization": "Bearer x"}}] - _install_inline_mcp_deps(deps, ["vscode"]) - - call_args = mock_vscode.call_args - server_config = call_args[0][1] - self.assertIn("headers", server_config) - self.assertEqual(server_config["headers"]["Authorization"], "Bearer x") - - @patch("apm_cli.cli._write_inline_mcp_vscode", side_effect=Exception("write failed")) - @patch("apm_cli.cli._get_console", return_value=None) 
- def test_continues_on_write_failure(self, _console, mock_vscode): - """Failure writing one dep should not prevent the next from being attempted.""" - deps = [ - {"name": "fail", "type": "sse", "url": "https://fail"}, - {"name": "ok", "type": "sse", "url": "https://ok"}, - ] - # Both raise, so no deps are successfully configured - count = _install_inline_mcp_deps(deps, ["vscode"]) - self.assertEqual(count, 0) - # But both were attempted - self.assertEqual(mock_vscode.call_count, 2) - - @patch("apm_cli.cli._write_inline_mcp_copilot") - @patch("apm_cli.cli._get_console", return_value=None) - def test_codex_runtime_uses_copilot_writer(self, _console, mock_copilot): - deps = [{"name": "s", "type": "sse", "url": "https://s"}] - _install_inline_mcp_deps(deps, ["codex"]) - - mock_copilot.assert_called_once() + def test_root_deps_take_precedence_over_transitive(self): + """When root and transitive share a key, the first (root) wins.""" + root = [{"name": "shared", "type": "sse", "url": "https://root-url"}] + transitive = [{"name": "shared", "type": "sse", "url": "https://transitive-url"}] + # Root deps come first in the combined list + combined = root + transitive + result = _deduplicate_mcp_deps(combined) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["url"], "https://root-url") # --------------------------------------------------------------------------- From 0d5e529860bd016c1485b91b2fd3a3fb2583e5ea Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Mon, 2 Mar 2026 22:28:25 +0000 Subject: [PATCH 7/9] feat: implement adapter-delegated inline MCP installation (#4/#5/#6) - Add _validate_inline_url() with https/http scheme allowlist - Add _install_inline_mcp_deps() delegating to ClientFactory adapters - VSCode: read-merge-write via adapter (full-overwrite API) - Copilot/Codex: pass merge dict via adapter update_config() - 15 new tests covering adapter delegation, URL validation, error cases - All 866 tests pass --- src/apm_cli/cli.py | 109 
++++++++++++++++++++- tests/unit/test_transitive_mcp.py | 151 +++++++++++++++++++++++++++++- 2 files changed, 256 insertions(+), 4 deletions(-) diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 939b02e5d..7457c4eff 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -2479,9 +2479,8 @@ def _install_mcp_dependencies( # --- Inline dict deps (name/type/url) --- if inline_deps: - _rich_warning( - f"Skipping {len(inline_deps)} inline MCP dep(s) — inline dict installation deferred to follow-up PR" - ) + inline_count = _install_inline_mcp_deps(inline_deps, target_runtimes, verbose) + configured_count += inline_count # Close the panel if console: @@ -2495,6 +2494,110 @@ def _install_mcp_dependencies( return configured_count +def _validate_inline_url(url: str, dep_name: str) -> bool: + """Validate that an inline MCP dep URL uses an allowed scheme. + + Blocks file://, data:, javascript:, etc. from transitive packages. + + Args: + url: The URL to validate. + dep_name: Name of the dep (for warning messages). + + Returns: + True if the URL scheme is allowed, False otherwise. + """ + from urllib.parse import urlparse + + _ALLOWED_SCHEMES = {"https", "http"} + parsed = urlparse(url) + if parsed.scheme not in _ALLOWED_SCHEMES: + _rich_warning( + f"Skipping inline MCP '{dep_name}': disallowed URL scheme '{parsed.scheme}'" + ) + return False + return True + + +def _install_inline_mcp_deps( + inline_deps: list, target_runtimes: list, verbose: bool = False +) -> int: + """Install inline MCP dependencies via runtime client adapters. + + Delegates JSON/TOML I/O to the existing ClientFactory adapters + (VSCodeClientAdapter, CopilotClientAdapter, CodexClientAdapter) + instead of reimplementing config file handling. + + Args: + inline_deps: List of dict MCP entries. + target_runtimes: List of target runtime names. + verbose: Show detailed output. + + Returns: + int: Number of servers successfully configured. 
+ """ + from apm_cli.factory import ClientFactory + + console = _get_console() + configured = 0 + + for dep in inline_deps: + name = dep.get("name", "") + server_type = dep.get("type", "sse") + url = dep.get("url", "") + headers = dep.get("headers", {}) + + if not name or not url: + safe_dep = { + "name": name or None, + "type": server_type or None, + "url_present": bool(url), + "has_headers": bool(headers), + } + _rich_warning( + "Skipping inline MCP dep with missing required fields " + f"(safe fields: {safe_dep})" + ) + continue + + # URL scheme validation (#6) + if not _validate_inline_url(url, name): + continue + + server_config = {"type": server_type, "url": url} + if headers: + server_config["headers"] = headers + + any_success = False + for rt in target_runtimes: + try: + adapter = ClientFactory.create_client(rt) + if rt == "vscode": + # VSCode adapter.update_config() is a full overwrite; + # read-merge-write to preserve existing servers. + config = adapter.get_current_config() + if "servers" not in config: + config["servers"] = {} + config["servers"][name] = server_config + adapter.update_config(config) + else: + # Copilot and Codex adapters merge internally. 
+ adapter.update_config({name: server_config}) + any_success = True + except Exception as e: + _rich_error(f"Failed to configure inline MCP '{name}' for {rt}: {e}") + continue + + if any_success: + if console: + console.print( + f"│ [green]✓[/green] {name} (inline) → " + f"{', '.join([rt.title() for rt in target_runtimes])}" + ) + configured += 1 + + return configured + + def _show_install_summary( apm_count: int, prompt_count: int, agent_count: int, mcp_count: int, apm_config ): diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index 72e5d900d..deb059fdd 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -1,9 +1,10 @@ """Tests for transitive MCP dependency collection, deduplication, and inline installation.""" +import json import tempfile import unittest from pathlib import Path -from unittest.mock import patch +from unittest.mock import patch, MagicMock import yaml @@ -11,7 +12,9 @@ from apm_cli.cli import ( _collect_transitive_mcp_deps, _deduplicate_mcp_deps, + _install_inline_mcp_deps, _install_mcp_dependencies, + _validate_inline_url, ) @@ -315,6 +318,152 @@ def test_root_deps_take_precedence_over_transitive(self): self.assertEqual(result[0]["url"], "https://root-url") +# --------------------------------------------------------------------------- +# _validate_inline_url +# --------------------------------------------------------------------------- +class TestValidateInlineUrl(unittest.TestCase): + + def test_allows_https(self): + self.assertTrue(_validate_inline_url("https://example.com/mcp", "srv")) + + def test_allows_http(self): + self.assertTrue(_validate_inline_url("http://localhost:8080", "srv")) + + @patch("apm_cli.cli._rich_warning") + def test_rejects_file_scheme(self, mock_warn): + self.assertFalse(_validate_inline_url("file:///etc/passwd", "bad")) + mock_warn.assert_called_once() + self.assertIn("disallowed URL scheme", mock_warn.call_args[0][0]) + + 
@patch("apm_cli.cli._rich_warning") + def test_rejects_data_scheme(self, mock_warn): + self.assertFalse(_validate_inline_url("data:text/html,
<b>hi</b>
", "bad")) + + @patch("apm_cli.cli._rich_warning") + def test_rejects_empty_scheme(self, mock_warn): + self.assertFalse(_validate_inline_url("no-scheme-url", "bad")) + + +# --------------------------------------------------------------------------- +# _install_inline_mcp_deps +# --------------------------------------------------------------------------- +class TestInstallInlineMCPDeps(unittest.TestCase): + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.factory.ClientFactory") + def test_delegates_to_vscode_adapter(self, mock_factory, _console): + mock_adapter = MagicMock() + mock_adapter.get_current_config.return_value = {"servers": {}} + mock_factory.create_client.return_value = mock_adapter + + deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] + count = _install_inline_mcp_deps(deps, ["vscode"]) + + self.assertEqual(count, 1) + mock_factory.create_client.assert_called_with("vscode") + # VSCode path: read-merge-write with full config + mock_adapter.get_current_config.assert_called_once() + call_config = mock_adapter.update_config.call_args[0][0] + self.assertIn("s1", call_config["servers"]) + self.assertEqual(call_config["servers"]["s1"]["url"], "https://s1.example.com") + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.factory.ClientFactory") + def test_delegates_to_copilot_adapter(self, mock_factory, _console): + mock_adapter = MagicMock() + mock_factory.create_client.return_value = mock_adapter + + deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] + count = _install_inline_mcp_deps(deps, ["copilot"]) + + self.assertEqual(count, 1) + mock_factory.create_client.assert_called_with("copilot") + # Copilot path: merge dict passed directly + call_config = mock_adapter.update_config.call_args[0][0] + self.assertIn("s1", call_config) + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.factory.ClientFactory") + def 
test_codex_uses_own_adapter_not_copilot(self, mock_factory, _console): + mock_adapter = MagicMock() + mock_factory.create_client.return_value = mock_adapter + + deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] + count = _install_inline_mcp_deps(deps, ["codex"]) + + self.assertEqual(count, 1) + mock_factory.create_client.assert_called_with("codex") + # Codex path: merge dict (adapter writes TOML internally) + call_config = mock_adapter.update_config.call_args[0][0] + self.assertIn("s1", call_config) + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.factory.ClientFactory") + def test_installs_for_multiple_runtimes(self, mock_factory, _console): + mock_adapter = MagicMock() + mock_adapter.get_current_config.return_value = {"servers": {}} + mock_factory.create_client.return_value = mock_adapter + + deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] + count = _install_inline_mcp_deps(deps, ["vscode", "copilot", "codex"]) + + self.assertEqual(count, 1) + self.assertEqual(mock_factory.create_client.call_count, 3) + + @patch("apm_cli.cli._rich_warning") + @patch("apm_cli.cli._get_console", return_value=None) + def test_skips_dep_without_name(self, _console, mock_warn): + deps = [{"type": "sse", "url": "https://no-name"}] + count = _install_inline_mcp_deps(deps, ["vscode"]) + self.assertEqual(count, 0) + self.assertIn("safe fields", mock_warn.call_args[0][0]) + self.assertNotIn("https://no-name", mock_warn.call_args[0][0]) + + @patch("apm_cli.cli._rich_warning") + @patch("apm_cli.cli._get_console", return_value=None) + def test_skips_dep_with_disallowed_scheme(self, _console, mock_warn): + deps = [{"name": "bad", "type": "sse", "url": "file:///etc/passwd"}] + count = _install_inline_mcp_deps(deps, ["vscode"]) + self.assertEqual(count, 0) + self.assertIn("disallowed URL scheme", mock_warn.call_args[0][0]) + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.factory.ClientFactory") + 
def test_includes_headers_in_server_config(self, mock_factory, _console): + mock_adapter = MagicMock() + mock_adapter.get_current_config.return_value = {"servers": {}} + mock_factory.create_client.return_value = mock_adapter + + deps = [{"name": "s1", "type": "sse", "url": "https://s1", "headers": {"Authorization": "Bearer x"}}] + _install_inline_mcp_deps(deps, ["vscode"]) + + call_config = mock_adapter.update_config.call_args[0][0] + self.assertIn("headers", call_config["servers"]["s1"]) + + @patch("apm_cli.cli._get_console", return_value=None) + @patch("apm_cli.factory.ClientFactory") + def test_continues_on_adapter_failure(self, mock_factory, _console): + mock_adapter = MagicMock() + mock_adapter.get_current_config.side_effect = Exception("write failed") + mock_factory.create_client.return_value = mock_adapter + + deps = [ + {"name": "fail", "type": "sse", "url": "https://fail"}, + {"name": "also-fail", "type": "sse", "url": "https://also"}, + ] + count = _install_inline_mcp_deps(deps, ["vscode"]) + self.assertEqual(count, 0) + + @patch("apm_cli.cli._rich_warning") + @patch("apm_cli.cli._get_console", return_value=None) + def test_missing_fields_warning_does_not_expose_headers(self, _console, mock_warn): + deps = [{"type": "sse", "headers": {"Authorization": "Bearer secret"}}] + _install_inline_mcp_deps(deps, ["vscode"]) + warning_msg = mock_warn.call_args[0][0] + self.assertNotIn("secret", warning_msg) + self.assertNotIn("Authorization", warning_msg) + + # --------------------------------------------------------------------------- # _install_mcp_dependencies # --------------------------------------------------------------------------- From 05861c6ed88591823aaebbc82eb688b01d5b2577 Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Mon, 2 Mar 2026 22:37:12 +0000 Subject: [PATCH 8/9] refactor: convert test_transitive_mcp.py to pytest-native style (#8) - Drop unittest.TestCase from all 6 test classes - Replace self.assert* with bare assert statements - 
Replace tempfile.TemporaryDirectory with tmp_path fixture - Remove unused imports (tempfile, unittest) --- tests/unit/test_transitive_mcp.py | 523 ++++++++++++++---------------- 1 file changed, 251 insertions(+), 272 deletions(-) diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index deb059fdd..98b2b8799 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -1,8 +1,6 @@ """Tests for transitive MCP dependency collection, deduplication, and inline installation.""" import json -import tempfile -import unittest from pathlib import Path from unittest.mock import patch, MagicMock @@ -21,291 +19,276 @@ # --------------------------------------------------------------------------- # APMPackage – MCP dict parsing # --------------------------------------------------------------------------- -class TestAPMPackageMCPParsing(unittest.TestCase): +class TestAPMPackageMCPParsing: """Ensure apm_package preserves both string and dict MCP entries.""" - def test_parse_string_mcp_deps(self): + def test_parse_string_mcp_deps(self, tmp_path): """String-only MCP deps parse correctly.""" - with tempfile.TemporaryDirectory() as tmp: - yml = Path(tmp) / "apm.yml" - yml.write_text(yaml.dump({ - "name": "pkg", - "version": "1.0.0", - "dependencies": {"mcp": ["ghcr.io/some/server"]}, - })) - pkg = APMPackage.from_apm_yml(yml) - deps = pkg.get_mcp_dependencies() - - self.assertEqual(deps, ["ghcr.io/some/server"]) - - def test_parse_dict_mcp_deps(self): + yml = tmp_path / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/some/server"]}, + })) + pkg = APMPackage.from_apm_yml(yml) + deps = pkg.get_mcp_dependencies() + + assert deps == ["ghcr.io/some/server"] + + def test_parse_dict_mcp_deps(self, tmp_path): """Inline dict MCP deps are preserved.""" inline = {"name": "my-srv", "type": "sse", "url": "https://example.com"} - with tempfile.TemporaryDirectory() as tmp: - 
yml = Path(tmp) / "apm.yml" - yml.write_text(yaml.dump({ - "name": "pkg", - "version": "1.0.0", - "dependencies": {"mcp": [inline]}, - })) - pkg = APMPackage.from_apm_yml(yml) - deps = pkg.get_mcp_dependencies() - - self.assertEqual(len(deps), 1) - self.assertIsInstance(deps[0], dict) - self.assertEqual(deps[0]["name"], "my-srv") - - def test_parse_mixed_mcp_deps(self): + yml = tmp_path / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": [inline]}, + })) + pkg = APMPackage.from_apm_yml(yml) + deps = pkg.get_mcp_dependencies() + + assert len(deps) == 1 + assert isinstance(deps[0], dict) + assert deps[0]["name"] == "my-srv" + + def test_parse_mixed_mcp_deps(self, tmp_path): """A mix of string and dict entries is preserved in order.""" inline = {"name": "inline-srv", "type": "http", "url": "https://x"} - with tempfile.TemporaryDirectory() as tmp: - yml = Path(tmp) / "apm.yml" - yml.write_text(yaml.dump({ - "name": "pkg", - "version": "1.0.0", - "dependencies": {"mcp": ["registry-srv", inline]}, - })) - pkg = APMPackage.from_apm_yml(yml) - deps = pkg.get_mcp_dependencies() - - self.assertEqual(len(deps), 2) - self.assertIsInstance(deps[0], str) - self.assertIsInstance(deps[1], dict) - - def test_no_mcp_section(self): + yml = tmp_path / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["registry-srv", inline]}, + })) + pkg = APMPackage.from_apm_yml(yml) + deps = pkg.get_mcp_dependencies() + + assert len(deps) == 2 + assert isinstance(deps[0], str) + assert isinstance(deps[1], dict) + + def test_no_mcp_section(self, tmp_path): """Missing MCP section returns empty list.""" - with tempfile.TemporaryDirectory() as tmp: - yml = Path(tmp) / "apm.yml" - yml.write_text(yaml.dump({ - "name": "pkg", - "version": "1.0.0", - })) - pkg = APMPackage.from_apm_yml(yml) - self.assertEqual(pkg.get_mcp_dependencies(), []) - - def test_mcp_null_returns_empty(self): + yml = tmp_path 
/ "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + })) + pkg = APMPackage.from_apm_yml(yml) + assert pkg.get_mcp_dependencies() == [] + + def test_mcp_null_returns_empty(self, tmp_path): """mcp: null should return empty list, not raise TypeError.""" - with tempfile.TemporaryDirectory() as tmp: - yml = Path(tmp) / "apm.yml" - yml.write_text(yaml.dump({ - "name": "pkg", - "version": "1.0.0", - "dependencies": {"mcp": None}, - })) - pkg = APMPackage.from_apm_yml(yml) - self.assertEqual(pkg.get_mcp_dependencies(), []) - - def test_mcp_empty_list_returns_empty(self): + yml = tmp_path / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": None}, + })) + pkg = APMPackage.from_apm_yml(yml) + assert pkg.get_mcp_dependencies() == [] + + def test_mcp_empty_list_returns_empty(self, tmp_path): """mcp: [] should return empty list.""" - with tempfile.TemporaryDirectory() as tmp: - yml = Path(tmp) / "apm.yml" - yml.write_text(yaml.dump({ - "name": "pkg", - "version": "1.0.0", - "dependencies": {"mcp": []}, - })) - pkg = APMPackage.from_apm_yml(yml) - self.assertEqual(pkg.get_mcp_dependencies(), []) + yml = tmp_path / "apm.yml" + yml.write_text(yaml.dump({ + "name": "pkg", + "version": "1.0.0", + "dependencies": {"mcp": []}, + })) + pkg = APMPackage.from_apm_yml(yml) + assert pkg.get_mcp_dependencies() == [] # --------------------------------------------------------------------------- # _collect_transitive_mcp_deps # --------------------------------------------------------------------------- -class TestCollectTransitiveMCPDeps(unittest.TestCase): +class TestCollectTransitiveMCPDeps: """Tests for scanning apm_modules/ for MCP deps.""" - def test_empty_when_dir_missing(self): - with tempfile.TemporaryDirectory() as tmp: - result = _collect_transitive_mcp_deps(Path(tmp) / "nonexistent") - self.assertEqual(result, []) - - def test_collects_string_deps(self): - with tempfile.TemporaryDirectory() as tmp: - 
pkg_dir = Path(tmp) / "org" / "pkg-a" - pkg_dir.mkdir(parents=True) - (pkg_dir / "apm.yml").write_text(yaml.dump({ - "name": "pkg-a", - "version": "1.0.0", - "dependencies": {"mcp": ["ghcr.io/a/server"]}, - })) - result = _collect_transitive_mcp_deps(Path(tmp)) - self.assertEqual(result, ["ghcr.io/a/server"]) - - def test_collects_dict_deps(self): + def test_empty_when_dir_missing(self, tmp_path): + result = _collect_transitive_mcp_deps(tmp_path / "nonexistent") + assert result == [] + + def test_collects_string_deps(self, tmp_path): + pkg_dir = tmp_path / "org" / "pkg-a" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text(yaml.dump({ + "name": "pkg-a", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/a/server"]}, + })) + result = _collect_transitive_mcp_deps(tmp_path) + assert result == ["ghcr.io/a/server"] + + def test_collects_dict_deps(self, tmp_path): inline = {"name": "kb", "type": "sse", "url": "https://kb.example.com"} - with tempfile.TemporaryDirectory() as tmp: - pkg_dir = Path(tmp) / "org" / "pkg-b" - pkg_dir.mkdir(parents=True) - (pkg_dir / "apm.yml").write_text(yaml.dump({ - "name": "pkg-b", + pkg_dir = tmp_path / "org" / "pkg-b" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text(yaml.dump({ + "name": "pkg-b", + "version": "1.0.0", + "dependencies": {"mcp": [inline]}, + })) + result = _collect_transitive_mcp_deps(tmp_path) + assert len(result) == 1 + assert result[0]["name"] == "kb" + + def test_collects_from_multiple_packages(self, tmp_path): + for i, dep in enumerate(["ghcr.io/a/s1", "ghcr.io/b/s2"]): + d = tmp_path / "org" / f"pkg-{i}" + d.mkdir(parents=True) + (d / "apm.yml").write_text(yaml.dump({ + "name": f"pkg-{i}", "version": "1.0.0", - "dependencies": {"mcp": [inline]}, + "dependencies": {"mcp": [dep]}, })) - result = _collect_transitive_mcp_deps(Path(tmp)) - self.assertEqual(len(result), 1) - self.assertEqual(result[0]["name"], "kb") - - def test_collects_from_multiple_packages(self): - with 
tempfile.TemporaryDirectory() as tmp: - for i, dep in enumerate(["ghcr.io/a/s1", "ghcr.io/b/s2"]): - d = Path(tmp) / "org" / f"pkg-{i}" - d.mkdir(parents=True) - (d / "apm.yml").write_text(yaml.dump({ - "name": f"pkg-{i}", - "version": "1.0.0", - "dependencies": {"mcp": [dep]}, - })) - result = _collect_transitive_mcp_deps(Path(tmp)) - self.assertEqual(len(result), 2) - - def test_skips_unparseable_apm_yml(self): - with tempfile.TemporaryDirectory() as tmp: - pkg_dir = Path(tmp) / "org" / "bad-pkg" - pkg_dir.mkdir(parents=True) - (pkg_dir / "apm.yml").write_text("invalid: yaml: [") - # Should not raise - result = _collect_transitive_mcp_deps(Path(tmp)) - self.assertEqual(result, []) - - def test_lockfile_scopes_collection_to_locked_packages(self): + result = _collect_transitive_mcp_deps(tmp_path) + assert len(result) == 2 + + def test_skips_unparseable_apm_yml(self, tmp_path): + pkg_dir = tmp_path / "org" / "bad-pkg" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text("invalid: yaml: [") + # Should not raise + result = _collect_transitive_mcp_deps(tmp_path) + assert result == [] + + def test_lockfile_scopes_collection_to_locked_packages(self, tmp_path): """Lock-file filtering should only collect MCP deps from locked packages.""" - with tempfile.TemporaryDirectory() as tmp: - apm_modules = Path(tmp) / "apm_modules" - # Package that IS in the lock file - locked_dir = apm_modules / "org" / "locked-pkg" - locked_dir.mkdir(parents=True) - (locked_dir / "apm.yml").write_text(yaml.dump({ - "name": "locked-pkg", - "version": "1.0.0", - "dependencies": {"mcp": ["ghcr.io/locked/server"]}, - })) - # Package that is NOT in the lock file (orphan) - orphan_dir = apm_modules / "org" / "orphan-pkg" - orphan_dir.mkdir(parents=True) - (orphan_dir / "apm.yml").write_text(yaml.dump({ - "name": "orphan-pkg", - "version": "1.0.0", - "dependencies": {"mcp": ["ghcr.io/orphan/server"]}, - })) - # Write lock file referencing only the locked package - lock_path = Path(tmp) / 
"apm.lock" - lock_path.write_text(yaml.dump({ - "lockfile_version": "1", - "dependencies": [ - {"repo_url": "org/locked-pkg", "host": "github.com"}, - ], - })) - result = _collect_transitive_mcp_deps(apm_modules, lock_path) - self.assertEqual(result, ["ghcr.io/locked/server"]) - - def test_lockfile_with_virtual_path(self): + apm_modules = tmp_path / "apm_modules" + # Package that IS in the lock file + locked_dir = apm_modules / "org" / "locked-pkg" + locked_dir.mkdir(parents=True) + (locked_dir / "apm.yml").write_text(yaml.dump({ + "name": "locked-pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/locked/server"]}, + })) + # Package that is NOT in the lock file (orphan) + orphan_dir = apm_modules / "org" / "orphan-pkg" + orphan_dir.mkdir(parents=True) + (orphan_dir / "apm.yml").write_text(yaml.dump({ + "name": "orphan-pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/orphan/server"]}, + })) + # Write lock file referencing only the locked package + lock_path = tmp_path / "apm.lock" + lock_path.write_text(yaml.dump({ + "lockfile_version": "1", + "dependencies": [ + {"repo_url": "org/locked-pkg", "host": "github.com"}, + ], + })) + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + assert result == ["ghcr.io/locked/server"] + + def test_lockfile_with_virtual_path(self, tmp_path): """Lock-file filtering works for subdirectory (virtual_path) packages.""" - with tempfile.TemporaryDirectory() as tmp: - apm_modules = Path(tmp) / "apm_modules" - # Subdirectory package matching lock entry - sub_dir = apm_modules / "org" / "monorepo" / "skills" / "azure" - sub_dir.mkdir(parents=True) - (sub_dir / "apm.yml").write_text(yaml.dump({ - "name": "azure-skill", - "version": "1.0.0", - "dependencies": {"mcp": [{"name": "learn", "type": "http", "url": "https://learn.example.com"}]}, - })) - # Another subdirectory NOT in the lock - other_dir = apm_modules / "org" / "monorepo" / "skills" / "other" - other_dir.mkdir(parents=True) - (other_dir / 
"apm.yml").write_text(yaml.dump({ - "name": "other-skill", - "version": "1.0.0", - "dependencies": {"mcp": ["ghcr.io/other/server"]}, - })) - lock_path = Path(tmp) / "apm.lock" - lock_path.write_text(yaml.dump({ - "lockfile_version": "1", - "dependencies": [ - {"repo_url": "org/monorepo", "host": "github.com", "virtual_path": "skills/azure"}, - ], - })) - result = _collect_transitive_mcp_deps(apm_modules, lock_path) - self.assertEqual(len(result), 1) - self.assertEqual(result[0]["name"], "learn") - - def test_lockfile_paths_do_not_use_full_rglob_scan(self): + apm_modules = tmp_path / "apm_modules" + # Subdirectory package matching lock entry + sub_dir = apm_modules / "org" / "monorepo" / "skills" / "azure" + sub_dir.mkdir(parents=True) + (sub_dir / "apm.yml").write_text(yaml.dump({ + "name": "azure-skill", + "version": "1.0.0", + "dependencies": {"mcp": [{"name": "learn", "type": "http", "url": "https://learn.example.com"}]}, + })) + # Another subdirectory NOT in the lock + other_dir = apm_modules / "org" / "monorepo" / "skills" / "other" + other_dir.mkdir(parents=True) + (other_dir / "apm.yml").write_text(yaml.dump({ + "name": "other-skill", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/other/server"]}, + })) + lock_path = tmp_path / "apm.lock" + lock_path.write_text(yaml.dump({ + "lockfile_version": "1", + "dependencies": [ + {"repo_url": "org/monorepo", "host": "github.com", "virtual_path": "skills/azure"}, + ], + })) + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + assert len(result) == 1 + assert result[0]["name"] == "learn" + + def test_lockfile_paths_do_not_use_full_rglob_scan(self, tmp_path): """When lock-derived paths are available, avoid full recursive scanning.""" - with tempfile.TemporaryDirectory() as tmp: - apm_modules = Path(tmp) / "apm_modules" - locked_dir = apm_modules / "org" / "locked-pkg" - locked_dir.mkdir(parents=True) - (locked_dir / "apm.yml").write_text(yaml.dump({ - "name": "locked-pkg", - "version": "1.0.0", 
- "dependencies": {"mcp": ["ghcr.io/locked/server"]}, - })) - - lock_path = Path(tmp) / "apm.lock" - lock_path.write_text(yaml.dump({ - "lockfile_version": "1", - "dependencies": [ - {"repo_url": "org/locked-pkg", "host": "github.com"}, - ], - })) - - with patch("pathlib.Path.rglob", side_effect=AssertionError("rglob should not be called")): - result = _collect_transitive_mcp_deps(apm_modules, lock_path) + apm_modules = tmp_path / "apm_modules" + locked_dir = apm_modules / "org" / "locked-pkg" + locked_dir.mkdir(parents=True) + (locked_dir / "apm.yml").write_text(yaml.dump({ + "name": "locked-pkg", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/locked/server"]}, + })) + + lock_path = tmp_path / "apm.lock" + lock_path.write_text(yaml.dump({ + "lockfile_version": "1", + "dependencies": [ + {"repo_url": "org/locked-pkg", "host": "github.com"}, + ], + })) + + with patch("pathlib.Path.rglob", side_effect=AssertionError("rglob should not be called")): + result = _collect_transitive_mcp_deps(apm_modules, lock_path) - self.assertEqual(result, ["ghcr.io/locked/server"]) + assert result == ["ghcr.io/locked/server"] - def test_invalid_lockfile_falls_back_to_rglob_scan(self): + def test_invalid_lockfile_falls_back_to_rglob_scan(self, tmp_path): """If lock parsing fails, function falls back to scanning all apm.yml files.""" - with tempfile.TemporaryDirectory() as tmp: - apm_modules = Path(tmp) / "apm_modules" - pkg_dir = apm_modules / "org" / "pkg-a" - pkg_dir.mkdir(parents=True) - (pkg_dir / "apm.yml").write_text(yaml.dump({ - "name": "pkg-a", - "version": "1.0.0", - "dependencies": {"mcp": ["ghcr.io/a/server"]}, - })) + apm_modules = tmp_path / "apm_modules" + pkg_dir = apm_modules / "org" / "pkg-a" + pkg_dir.mkdir(parents=True) + (pkg_dir / "apm.yml").write_text(yaml.dump({ + "name": "pkg-a", + "version": "1.0.0", + "dependencies": {"mcp": ["ghcr.io/a/server"]}, + })) - lock_path = Path(tmp) / "apm.lock" - lock_path.write_text("dependencies: [") + lock_path = 
tmp_path / "apm.lock" + lock_path.write_text("dependencies: [") - result = _collect_transitive_mcp_deps(apm_modules, lock_path) - self.assertEqual(result, ["ghcr.io/a/server"]) + result = _collect_transitive_mcp_deps(apm_modules, lock_path) + assert result == ["ghcr.io/a/server"] # --------------------------------------------------------------------------- # _deduplicate_mcp_deps # --------------------------------------------------------------------------- -class TestDeduplicateMCPDeps(unittest.TestCase): +class TestDeduplicateMCPDeps: def test_deduplicates_strings(self): deps = ["a", "b", "a", "c", "b"] - self.assertEqual(_deduplicate_mcp_deps(deps), ["a", "b", "c"]) + assert _deduplicate_mcp_deps(deps) == ["a", "b", "c"] def test_deduplicates_dicts_by_name(self): d1 = {"name": "srv", "type": "sse", "url": "https://one"} d2 = {"name": "srv", "type": "sse", "url": "https://two"} # same name d3 = {"name": "other", "type": "sse", "url": "https://three"} result = _deduplicate_mcp_deps([d1, d2, d3]) - self.assertEqual(len(result), 2) - self.assertEqual(result[0]["url"], "https://one") # first wins + assert len(result) == 2 + assert result[0]["url"] == "https://one" # first wins def test_mixed_dedup(self): inline = {"name": "kb", "type": "sse", "url": "https://kb"} deps = ["a", inline, "a", {"name": "kb", "type": "sse", "url": "https://kb2"}] result = _deduplicate_mcp_deps(deps) - self.assertEqual(len(result), 2) - self.assertIsInstance(result[0], str) - self.assertIsInstance(result[1], dict) + assert len(result) == 2 + assert isinstance(result[0], str) + assert isinstance(result[1], dict) def test_empty_list(self): - self.assertEqual(_deduplicate_mcp_deps([]), []) + assert _deduplicate_mcp_deps([]) == [] def test_dict_without_name_kept(self): """Dicts without 'name' are kept if not already in result.""" d = {"type": "sse", "url": "https://x"} result = _deduplicate_mcp_deps([d, d]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def 
test_root_deps_take_precedence_over_transitive(self): """When root and transitive share a key, the first (root) wins.""" @@ -314,40 +297,40 @@ def test_root_deps_take_precedence_over_transitive(self): # Root deps come first in the combined list combined = root + transitive result = _deduplicate_mcp_deps(combined) - self.assertEqual(len(result), 1) - self.assertEqual(result[0]["url"], "https://root-url") + assert len(result) == 1 + assert result[0]["url"] == "https://root-url" # --------------------------------------------------------------------------- # _validate_inline_url # --------------------------------------------------------------------------- -class TestValidateInlineUrl(unittest.TestCase): +class TestValidateInlineUrl: def test_allows_https(self): - self.assertTrue(_validate_inline_url("https://example.com/mcp", "srv")) + assert _validate_inline_url("https://example.com/mcp", "srv") def test_allows_http(self): - self.assertTrue(_validate_inline_url("http://localhost:8080", "srv")) + assert _validate_inline_url("http://localhost:8080", "srv") @patch("apm_cli.cli._rich_warning") def test_rejects_file_scheme(self, mock_warn): - self.assertFalse(_validate_inline_url("file:///etc/passwd", "bad")) + assert not _validate_inline_url("file:///etc/passwd", "bad") mock_warn.assert_called_once() - self.assertIn("disallowed URL scheme", mock_warn.call_args[0][0]) + assert "disallowed URL scheme" in mock_warn.call_args[0][0] @patch("apm_cli.cli._rich_warning") def test_rejects_data_scheme(self, mock_warn): - self.assertFalse(_validate_inline_url("data:text/html,
<script>hi</script>
", "bad")) + assert not _validate_inline_url("data:text/html,
<script>hi</script>
", "bad") @patch("apm_cli.cli._rich_warning") def test_rejects_empty_scheme(self, mock_warn): - self.assertFalse(_validate_inline_url("no-scheme-url", "bad")) + assert not _validate_inline_url("no-scheme-url", "bad") # --------------------------------------------------------------------------- # _install_inline_mcp_deps # --------------------------------------------------------------------------- -class TestInstallInlineMCPDeps(unittest.TestCase): +class TestInstallInlineMCPDeps: @patch("apm_cli.cli._get_console", return_value=None) @patch("apm_cli.factory.ClientFactory") @@ -359,13 +342,13 @@ def test_delegates_to_vscode_adapter(self, mock_factory, _console): deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] count = _install_inline_mcp_deps(deps, ["vscode"]) - self.assertEqual(count, 1) + assert count == 1 mock_factory.create_client.assert_called_with("vscode") # VSCode path: read-merge-write with full config mock_adapter.get_current_config.assert_called_once() call_config = mock_adapter.update_config.call_args[0][0] - self.assertIn("s1", call_config["servers"]) - self.assertEqual(call_config["servers"]["s1"]["url"], "https://s1.example.com") + assert "s1" in call_config["servers"] + assert call_config["servers"]["s1"]["url"] == "https://s1.example.com" @patch("apm_cli.cli._get_console", return_value=None) @patch("apm_cli.factory.ClientFactory") @@ -376,11 +359,11 @@ def test_delegates_to_copilot_adapter(self, mock_factory, _console): deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] count = _install_inline_mcp_deps(deps, ["copilot"]) - self.assertEqual(count, 1) + assert count == 1 mock_factory.create_client.assert_called_with("copilot") # Copilot path: merge dict passed directly call_config = mock_adapter.update_config.call_args[0][0] - self.assertIn("s1", call_config) + assert "s1" in call_config @patch("apm_cli.cli._get_console", return_value=None) @patch("apm_cli.factory.ClientFactory") @@ -391,11 +374,11 @@ def 
test_codex_uses_own_adapter_not_copilot(self, mock_factory, _console): deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] count = _install_inline_mcp_deps(deps, ["codex"]) - self.assertEqual(count, 1) + assert count == 1 mock_factory.create_client.assert_called_with("codex") # Codex path: merge dict (adapter writes TOML internally) call_config = mock_adapter.update_config.call_args[0][0] - self.assertIn("s1", call_config) + assert "s1" in call_config @patch("apm_cli.cli._get_console", return_value=None) @patch("apm_cli.factory.ClientFactory") @@ -407,25 +390,25 @@ def test_installs_for_multiple_runtimes(self, mock_factory, _console): deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] count = _install_inline_mcp_deps(deps, ["vscode", "copilot", "codex"]) - self.assertEqual(count, 1) - self.assertEqual(mock_factory.create_client.call_count, 3) + assert count == 1 + assert mock_factory.create_client.call_count == 3 @patch("apm_cli.cli._rich_warning") @patch("apm_cli.cli._get_console", return_value=None) def test_skips_dep_without_name(self, _console, mock_warn): deps = [{"type": "sse", "url": "https://no-name"}] count = _install_inline_mcp_deps(deps, ["vscode"]) - self.assertEqual(count, 0) - self.assertIn("safe fields", mock_warn.call_args[0][0]) - self.assertNotIn("https://no-name", mock_warn.call_args[0][0]) + assert count == 0 + assert "safe fields" in mock_warn.call_args[0][0] + assert "https://no-name" not in mock_warn.call_args[0][0] @patch("apm_cli.cli._rich_warning") @patch("apm_cli.cli._get_console", return_value=None) def test_skips_dep_with_disallowed_scheme(self, _console, mock_warn): deps = [{"name": "bad", "type": "sse", "url": "file:///etc/passwd"}] count = _install_inline_mcp_deps(deps, ["vscode"]) - self.assertEqual(count, 0) - self.assertIn("disallowed URL scheme", mock_warn.call_args[0][0]) + assert count == 0 + assert "disallowed URL scheme" in mock_warn.call_args[0][0] @patch("apm_cli.cli._get_console", 
return_value=None) @patch("apm_cli.factory.ClientFactory") @@ -438,7 +421,7 @@ def test_includes_headers_in_server_config(self, mock_factory, _console): _install_inline_mcp_deps(deps, ["vscode"]) call_config = mock_adapter.update_config.call_args[0][0] - self.assertIn("headers", call_config["servers"]["s1"]) + assert "headers" in call_config["servers"]["s1"] @patch("apm_cli.cli._get_console", return_value=None) @patch("apm_cli.factory.ClientFactory") @@ -452,7 +435,7 @@ def test_continues_on_adapter_failure(self, mock_factory, _console): {"name": "also-fail", "type": "sse", "url": "https://also"}, ] count = _install_inline_mcp_deps(deps, ["vscode"]) - self.assertEqual(count, 0) + assert count == 0 @patch("apm_cli.cli._rich_warning") @patch("apm_cli.cli._get_console", return_value=None) @@ -460,14 +443,14 @@ def test_missing_fields_warning_does_not_expose_headers(self, _console, mock_war deps = [{"type": "sse", "headers": {"Authorization": "Bearer secret"}}] _install_inline_mcp_deps(deps, ["vscode"]) warning_msg = mock_warn.call_args[0][0] - self.assertNotIn("secret", warning_msg) - self.assertNotIn("Authorization", warning_msg) + assert "secret" not in warning_msg + assert "Authorization" not in warning_msg # --------------------------------------------------------------------------- # _install_mcp_dependencies # --------------------------------------------------------------------------- -class TestInstallMCPDependencies(unittest.TestCase): +class TestInstallMCPDependencies: @patch("apm_cli.cli._get_console", return_value=None) @patch("apm_cli.registry.operations.MCPServerOperations") @@ -480,7 +463,7 @@ def test_already_configured_registry_servers_not_counted_as_new( count = _install_mcp_dependencies(["ghcr.io/org/server"], runtime="vscode") - self.assertEqual(count, 0) + assert count == 0 @patch("apm_cli.cli._install_for_runtime") @patch("apm_cli.cli._get_console", return_value=None) @@ -502,7 +485,7 @@ def test_counts_only_newly_configured_registry_servers( 
["ghcr.io/org/already", "ghcr.io/org/new"], runtime="vscode" ) - self.assertEqual(count, 1) + assert count == 1 mock_install_runtime.assert_called_once() @patch("apm_cli.cli._install_for_runtime") @@ -510,7 +493,7 @@ def test_counts_only_newly_configured_registry_servers( def test_mixed_registry_servers_show_already_configured_and_count_only_new( self, mock_ops_cls, mock_install_runtime ): - mock_console = unittest.mock.MagicMock() + mock_console = MagicMock() mock_ops = mock_ops_cls.return_value mock_ops.validate_servers_exist.return_value = ( ["ghcr.io/org/already", "ghcr.io/org/new"], @@ -526,14 +509,10 @@ def test_mixed_registry_servers_show_already_configured_and_count_only_new( ["ghcr.io/org/already", "ghcr.io/org/new"], runtime="vscode" ) - self.assertEqual(count, 1) + assert count == 1 mock_install_runtime.assert_called_once() printed_lines = "\n".join( str(call.args[0]) for call in mock_console.print.call_args_list if call.args ) - self.assertIn("ghcr.io/org/already", printed_lines) - self.assertIn("already configured", printed_lines) - - -if __name__ == "__main__": - unittest.main() + assert "ghcr.io/org/already" in printed_lines + assert "already configured" in printed_lines From 940c9e4e9a31ab4285ed1ab02721dc977ef88620 Mon Sep 17 00:00:00 2001 From: sergio-sisternes-epam Date: Tue, 3 Mar 2026 12:23:55 +0000 Subject: [PATCH 9/9] refactor: remove inline dict MCP installation pipeline Scope down to registry-only MCP propagation per review feedback. Inline dict format contradicts #132 (registry-first) and #122 (closed as won't-fix). 
Removed: - _validate_inline_url() and _install_inline_mcp_deps() from cli.py - TestValidateInlineUrl (5 tests) and TestInstallInlineMCPDeps (10 tests) - Inline dict examples and descriptions from dependencies.md and integrations.md Kept (bug-fix scope): - _collect_transitive_mcp_deps() with LockFile-based collection - _deduplicate_mcp_deps() for registry string dedup - mcp: null guard in get_mcp_dependencies() - from_apm_yml() preserving dicts for data fidelity --- docs/dependencies.md | 10 +- docs/integrations.md | 6 -- src/apm_cli/cli.py | 116 +---------------------- tests/unit/test_transitive_mcp.py | 150 +----------------------------- 4 files changed, 5 insertions(+), 277 deletions(-) diff --git a/docs/dependencies.md b/docs/dependencies.md index 6d6e9c792..6f676694a 100644 --- a/docs/dependencies.md +++ b/docs/dependencies.md @@ -75,17 +75,9 @@ dependencies: - github/awesome-copilot/skills/review-and-refactor # Code review skill mcp: - io.github.github/github-mcp-server # Registry reference - - name: my-knowledge-base # Inline config - type: http - url: https://my-kb.example.com/ ``` -MCP dependencies support two formats: - -- **Registry strings** — resolve via the MCP server registry (e.g. `io.github.github/github-mcp-server`). -- **Inline dicts** — written directly to runtime configs, bypassing the registry. Useful for private or self-hosted servers. - -Inline entries require `name`, `type` (`sse` or `http`), and `url`. An optional `headers` map is also supported. +MCP dependencies resolve via the MCP server registry (e.g. `io.github.github/github-mcp-server`). MCP dependencies declared by transitive APM packages are collected automatically during `apm install`. 
diff --git a/docs/integrations.md b/docs/integrations.md index afe12090f..1cfc840c2 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -519,14 +519,8 @@ dependencies: - ghcr.io/github/github-mcp-server - ghcr.io/modelcontextprotocol/filesystem-server - ghcr.io/modelcontextprotocol/postgres-server - # Inline config (private / self-hosted servers) - - name: my-knowledge-base - type: http - url: https://my-kb.example.com/ ``` -Inline MCP entries (`name`/`type`/`url` dicts) are written directly into runtime configs and bypass the registry. They are also collected transitively from APM dependencies. - ```bash # Install MCP dependencies apm install diff --git a/src/apm_cli/cli.py b/src/apm_cli/cli.py index 5aed01394..d67fb77f7 100644 --- a/src/apm_cli/cli.py +++ b/src/apm_cli/cli.py @@ -2305,12 +2305,8 @@ def _install_mcp_dependencies( ): """Install MCP dependencies using existing logic. - Supports two formats: - - Registry strings (e.g. "ghcr.io/github/github-mcp-server") - - Inline dicts (e.g. {"name": "my-server", "type": "sse", "url": "…"}) - Args: - mcp_deps: List of MCP dependency entries (str or dict) + mcp_deps: List of MCP dependency entries (registry strings) runtime: Target specific runtime only exclude: Exclude specific runtime from installation verbose: Show detailed installation information @@ -2322,9 +2318,9 @@ def _install_mcp_dependencies( _rich_warning("No MCP dependencies found in apm.yml") return 0 - # Separate registry strings from inline dict configs + # Filter to registry strings only (inline dicts are preserved during + # parsing for data fidelity but not installed — see #132). 
registry_deps = [d for d in mcp_deps if isinstance(d, str)] - inline_deps = [d for d in mcp_deps if isinstance(d, dict)] console = _get_console() @@ -2549,11 +2545,6 @@ def _install_mcp_dependencies( _rich_error("Cannot validate MCP servers without registry operations") raise RuntimeError("Registry operations module required for MCP installation") - # --- Inline dict deps (name/type/url) --- - if inline_deps: - inline_count = _install_inline_mcp_deps(inline_deps, target_runtimes, verbose) - configured_count += inline_count - # Close the panel if console: if configured_count > 0: @@ -2566,108 +2557,7 @@ def _install_mcp_dependencies( return configured_count -def _validate_inline_url(url: str, dep_name: str) -> bool: - """Validate that an inline MCP dep URL uses an allowed scheme. - - Blocks file://, data:, javascript:, etc. from transitive packages. - - Args: - url: The URL to validate. - dep_name: Name of the dep (for warning messages). - - Returns: - True if the URL scheme is allowed, False otherwise. - """ - from urllib.parse import urlparse - - _ALLOWED_SCHEMES = {"https", "http"} - parsed = urlparse(url) - if parsed.scheme not in _ALLOWED_SCHEMES: - _rich_warning( - f"Skipping inline MCP '{dep_name}': disallowed URL scheme '{parsed.scheme}'" - ) - return False - return True - - -def _install_inline_mcp_deps( - inline_deps: list, target_runtimes: list, verbose: bool = False -) -> int: - """Install inline MCP dependencies via runtime client adapters. - - Delegates JSON/TOML I/O to the existing ClientFactory adapters - (VSCodeClientAdapter, CopilotClientAdapter, CodexClientAdapter) - instead of reimplementing config file handling. - - Args: - inline_deps: List of dict MCP entries. - target_runtimes: List of target runtime names. - verbose: Show detailed output. - - Returns: - int: Number of servers successfully configured. 
- """ - from apm_cli.factory import ClientFactory - - console = _get_console() - configured = 0 - - for dep in inline_deps: - name = dep.get("name", "") - server_type = dep.get("type", "sse") - url = dep.get("url", "") - headers = dep.get("headers", {}) - - if not name or not url: - safe_dep = { - "name": name or None, - "type": server_type or None, - "url_present": bool(url), - "has_headers": bool(headers), - } - _rich_warning( - "Skipping inline MCP dep with missing required fields " - f"(safe fields: {safe_dep})" - ) - continue - - # URL scheme validation (#6) - if not _validate_inline_url(url, name): - continue - - server_config = {"type": server_type, "url": url} - if headers: - server_config["headers"] = headers - - any_success = False - for rt in target_runtimes: - try: - adapter = ClientFactory.create_client(rt) - if rt == "vscode": - # VSCode adapter.update_config() is a full overwrite; - # read-merge-write to preserve existing servers. - config = adapter.get_current_config() - if "servers" not in config: - config["servers"] = {} - config["servers"][name] = server_config - adapter.update_config(config) - else: - # Copilot and Codex adapters merge internally. 
- adapter.update_config({name: server_config}) - any_success = True - except Exception as e: - _rich_error(f"Failed to configure inline MCP '{name}' for {rt}: {e}") - continue - - if any_success: - if console: - console.print( - f"│ [green]✓[/green] {name} (inline) → " - f"{', '.join([rt.title() for rt in target_runtimes])}" - ) - configured += 1 - return configured def _show_install_summary( diff --git a/tests/unit/test_transitive_mcp.py b/tests/unit/test_transitive_mcp.py index 98b2b8799..e802a0012 100644 --- a/tests/unit/test_transitive_mcp.py +++ b/tests/unit/test_transitive_mcp.py @@ -1,6 +1,5 @@ -"""Tests for transitive MCP dependency collection, deduplication, and inline installation.""" +"""Tests for transitive MCP dependency collection and deduplication.""" -import json from pathlib import Path from unittest.mock import patch, MagicMock @@ -10,9 +9,7 @@ from apm_cli.cli import ( _collect_transitive_mcp_deps, _deduplicate_mcp_deps, - _install_inline_mcp_deps, _install_mcp_dependencies, - _validate_inline_url, ) @@ -301,151 +298,6 @@ def test_root_deps_take_precedence_over_transitive(self): assert result[0]["url"] == "https://root-url" -# --------------------------------------------------------------------------- -# _validate_inline_url -# --------------------------------------------------------------------------- -class TestValidateInlineUrl: - - def test_allows_https(self): - assert _validate_inline_url("https://example.com/mcp", "srv") - - def test_allows_http(self): - assert _validate_inline_url("http://localhost:8080", "srv") - - @patch("apm_cli.cli._rich_warning") - def test_rejects_file_scheme(self, mock_warn): - assert not _validate_inline_url("file:///etc/passwd", "bad") - mock_warn.assert_called_once() - assert "disallowed URL scheme" in mock_warn.call_args[0][0] - - @patch("apm_cli.cli._rich_warning") - def test_rejects_data_scheme(self, mock_warn): - assert not _validate_inline_url("data:text/html,
<script>hi</script>
", "bad") - - @patch("apm_cli.cli._rich_warning") - def test_rejects_empty_scheme(self, mock_warn): - assert not _validate_inline_url("no-scheme-url", "bad") - - -# --------------------------------------------------------------------------- -# _install_inline_mcp_deps -# --------------------------------------------------------------------------- -class TestInstallInlineMCPDeps: - - @patch("apm_cli.cli._get_console", return_value=None) - @patch("apm_cli.factory.ClientFactory") - def test_delegates_to_vscode_adapter(self, mock_factory, _console): - mock_adapter = MagicMock() - mock_adapter.get_current_config.return_value = {"servers": {}} - mock_factory.create_client.return_value = mock_adapter - - deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] - count = _install_inline_mcp_deps(deps, ["vscode"]) - - assert count == 1 - mock_factory.create_client.assert_called_with("vscode") - # VSCode path: read-merge-write with full config - mock_adapter.get_current_config.assert_called_once() - call_config = mock_adapter.update_config.call_args[0][0] - assert "s1" in call_config["servers"] - assert call_config["servers"]["s1"]["url"] == "https://s1.example.com" - - @patch("apm_cli.cli._get_console", return_value=None) - @patch("apm_cli.factory.ClientFactory") - def test_delegates_to_copilot_adapter(self, mock_factory, _console): - mock_adapter = MagicMock() - mock_factory.create_client.return_value = mock_adapter - - deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] - count = _install_inline_mcp_deps(deps, ["copilot"]) - - assert count == 1 - mock_factory.create_client.assert_called_with("copilot") - # Copilot path: merge dict passed directly - call_config = mock_adapter.update_config.call_args[0][0] - assert "s1" in call_config - - @patch("apm_cli.cli._get_console", return_value=None) - @patch("apm_cli.factory.ClientFactory") - def test_codex_uses_own_adapter_not_copilot(self, mock_factory, _console): - mock_adapter = MagicMock() - 
mock_factory.create_client.return_value = mock_adapter - - deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] - count = _install_inline_mcp_deps(deps, ["codex"]) - - assert count == 1 - mock_factory.create_client.assert_called_with("codex") - # Codex path: merge dict (adapter writes TOML internally) - call_config = mock_adapter.update_config.call_args[0][0] - assert "s1" in call_config - - @patch("apm_cli.cli._get_console", return_value=None) - @patch("apm_cli.factory.ClientFactory") - def test_installs_for_multiple_runtimes(self, mock_factory, _console): - mock_adapter = MagicMock() - mock_adapter.get_current_config.return_value = {"servers": {}} - mock_factory.create_client.return_value = mock_adapter - - deps = [{"name": "s1", "type": "sse", "url": "https://s1.example.com"}] - count = _install_inline_mcp_deps(deps, ["vscode", "copilot", "codex"]) - - assert count == 1 - assert mock_factory.create_client.call_count == 3 - - @patch("apm_cli.cli._rich_warning") - @patch("apm_cli.cli._get_console", return_value=None) - def test_skips_dep_without_name(self, _console, mock_warn): - deps = [{"type": "sse", "url": "https://no-name"}] - count = _install_inline_mcp_deps(deps, ["vscode"]) - assert count == 0 - assert "safe fields" in mock_warn.call_args[0][0] - assert "https://no-name" not in mock_warn.call_args[0][0] - - @patch("apm_cli.cli._rich_warning") - @patch("apm_cli.cli._get_console", return_value=None) - def test_skips_dep_with_disallowed_scheme(self, _console, mock_warn): - deps = [{"name": "bad", "type": "sse", "url": "file:///etc/passwd"}] - count = _install_inline_mcp_deps(deps, ["vscode"]) - assert count == 0 - assert "disallowed URL scheme" in mock_warn.call_args[0][0] - - @patch("apm_cli.cli._get_console", return_value=None) - @patch("apm_cli.factory.ClientFactory") - def test_includes_headers_in_server_config(self, mock_factory, _console): - mock_adapter = MagicMock() - mock_adapter.get_current_config.return_value = {"servers": {}} - 
mock_factory.create_client.return_value = mock_adapter - - deps = [{"name": "s1", "type": "sse", "url": "https://s1", "headers": {"Authorization": "Bearer x"}}] - _install_inline_mcp_deps(deps, ["vscode"]) - - call_config = mock_adapter.update_config.call_args[0][0] - assert "headers" in call_config["servers"]["s1"] - - @patch("apm_cli.cli._get_console", return_value=None) - @patch("apm_cli.factory.ClientFactory") - def test_continues_on_adapter_failure(self, mock_factory, _console): - mock_adapter = MagicMock() - mock_adapter.get_current_config.side_effect = Exception("write failed") - mock_factory.create_client.return_value = mock_adapter - - deps = [ - {"name": "fail", "type": "sse", "url": "https://fail"}, - {"name": "also-fail", "type": "sse", "url": "https://also"}, - ] - count = _install_inline_mcp_deps(deps, ["vscode"]) - assert count == 0 - - @patch("apm_cli.cli._rich_warning") - @patch("apm_cli.cli._get_console", return_value=None) - def test_missing_fields_warning_does_not_expose_headers(self, _console, mock_warn): - deps = [{"type": "sse", "headers": {"Authorization": "Bearer secret"}}] - _install_inline_mcp_deps(deps, ["vscode"]) - warning_msg = mock_warn.call_args[0][0] - assert "secret" not in warning_msg - assert "Authorization" not in warning_msg - # --------------------------------------------------------------------------- # _install_mcp_dependencies