From 349033edb3af6f3fe6172997de9557644ba81946 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 8 Apr 2026 20:37:10 +0000 Subject: [PATCH 01/19] Initial plan From 3d9395f891564c9b959164048996a8e00b416968 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 8 Apr 2026 20:48:17 +0000 Subject: [PATCH 02/19] feat: add integration catalog system with catalog files, IntegrationCatalog class, list --catalog flag, upgrade command, integration.yml descriptor, and tests Agent-Logs-Url: https://github.com/github/spec-kit/sessions/bbcd44e8-c69c-4735-adc1-bdf1ce109184 Co-authored-by: mnriem <15701806+mnriem@users.noreply.github.com> --- integrations/CONTRIBUTING.md | 135 ++++ integrations/README.md | 110 +++ integrations/catalog.community.json | 6 + integrations/catalog.json | 250 +++++++ src/specify_cli/__init__.py | 150 +++- src/specify_cli/integrations/catalog.py | 516 ++++++++++++++ .../integrations/test_integration_catalog.py | 642 ++++++++++++++++++ 7 files changed, 1808 insertions(+), 1 deletion(-) create mode 100644 integrations/CONTRIBUTING.md create mode 100644 integrations/README.md create mode 100644 integrations/catalog.community.json create mode 100644 integrations/catalog.json create mode 100644 src/specify_cli/integrations/catalog.py create mode 100644 tests/integrations/test_integration_catalog.py diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md new file mode 100644 index 0000000000..70944bbc75 --- /dev/null +++ b/integrations/CONTRIBUTING.md @@ -0,0 +1,135 @@ +# Contributing to the Integration Catalog + +This guide covers adding integrations to both the **built-in** and **community** catalogs. + +## Adding a Built-In Integration + +Built-in integrations are maintained by the Spec Kit core team and ship with the CLI. + +### Checklist + +1. **Create the integration subpackage** under `src/specify_cli/integrations//` +2. 
**Implement the integration class** extending `MarkdownIntegration`, `TomlIntegration`, or `SkillsIntegration` +3. **Register the integration** in `src/specify_cli/integrations/__init__.py` +4. **Add tests** under `tests/integrations/test_integration_.py` +5. **Add a catalog entry** in `integrations/catalog.json` +6. **Update documentation** in `AGENTS.md` and `README.md` + +### Catalog Entry Format + +Add your integration to `integrations/catalog.json`: + +```json +{ + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + } +} +``` + +## Adding a Community Integration + +Community integrations are contributed by external developers and listed in `integrations/catalog.community.json` for discovery. + +### Prerequisites + +1. **Working integration** — tested with `specify integration install` +2. **Public repository** — hosted on GitHub or similar +3. **`integration.yml` descriptor** — valid descriptor file (see below) +4. **Documentation** — README with usage instructions +5. 
**License** — open source license file + +### `integration.yml` Descriptor + +Every community integration must include an `integration.yml`: + +```yaml +schema_version: "1.0" +integration: + id: "my-agent" + name: "My Agent" + version: "1.0.0" + description: "Integration for My Agent" + author: "your-name" + repository: "https://github.com/your-name/speckit-my-agent" + license: "MIT" +requires: + speckit_version: ">=0.6.0" + tools: + - name: "my-agent" + version: ">=1.0.0" + required: true +provides: + commands: + - name: "speckit.specify" + file: "templates/speckit.specify.md" + scripts: + - update-context.sh +``` + +### Descriptor Validation Rules + +| Field | Rule | +|-------|------| +| `schema_version` | Must be `"1.0"` | +| `integration.id` | Lowercase alphanumeric + hyphens (`^[a-z0-9-]+$`) | +| `integration.version` | Valid semantic version | +| `requires.speckit_version` | PEP 440 version specifier | +| `provides` | Must include at least one command or script | +| `provides.commands[].name` | String identifier | +| `provides.commands[].file` | Relative path to template file | + +### Submitting to the Community Catalog + +1. **Fork** the [spec-kit repository](https://github.com/github/spec-kit) +2. **Add your entry** to `integrations/catalog.community.json`: + +```json +{ + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "your-name", + "repository": "https://github.com/your-name/speckit-my-agent", + "tags": ["cli"] + } +} +``` + +3. **Open a pull request** with: + - Your catalog entry + - Link to your integration repository + - Confirmation that `integration.yml` is valid + +### Version Updates + +To update your integration version in the catalog: + +1. Release a new version of your integration +2. Open a PR updating the `version` field in `catalog.community.json` +3. 
Ensure backward compatibility or document breaking changes + +## Upgrade Workflow + +The `specify integration upgrade` command supports diff-aware upgrades: + +1. **Hash comparison** — the manifest records SHA-256 hashes of all installed files +2. **Modified file detection** — files changed since installation are flagged +3. **Safe default** — modified files are preserved unless `--force` is used +4. **Clean reinstall** — unmodified files are replaced with the latest version + +```bash +# Upgrade current integration (blocks if files are modified) +specify integration upgrade + +# Force upgrade (overwrites modified files) +specify integration upgrade --force +``` diff --git a/integrations/README.md b/integrations/README.md new file mode 100644 index 0000000000..5c7c6ea1ed --- /dev/null +++ b/integrations/README.md @@ -0,0 +1,110 @@ +# Spec Kit Integration Catalog + +The integration catalog enables discovery, versioning, and distribution of AI agent integrations for Spec Kit. + +## Catalog Files + +### Built-In Catalog (`catalog.json`) + +Contains integrations that ship with Spec Kit. These are maintained by the core team and always installable. + +### Community Catalog (`catalog.community.json`) + +Community-contributed integrations. Listed for discovery only — users install from the source repositories. 
+ +## CLI Commands + +```bash +# List built-in integrations (default) +specify integration list + +# Browse full catalog (built-in + community) +specify integration list --catalog + +# Install an integration +specify integration install copilot + +# Upgrade the current integration (diff-aware) +specify integration upgrade + +# Upgrade with force (overwrite modified files) +specify integration upgrade --force +``` + +## Integration Descriptor (`integration.yml`) + +Each integration can include an `integration.yml` descriptor that documents its metadata, requirements, and provided commands/scripts: + +```yaml +schema_version: "1.0" +integration: + id: "my-agent" + name: "My Agent" + version: "1.0.0" + description: "Integration for My Agent" + author: "my-org" + repository: "https://github.com/my-org/speckit-my-agent" + license: "MIT" +requires: + speckit_version: ">=0.6.0" + tools: + - name: "my-agent" + version: ">=1.0.0" + required: true +provides: + commands: + - name: "speckit.specify" + file: "templates/speckit.specify.md" + - name: "speckit.plan" + file: "templates/speckit.plan.md" + scripts: + - update-context.sh + - update-context.ps1 +``` + +## Catalog Schema + +Both catalog files follow the same JSON schema: + +```json +{ + "schema_version": "1.0", + "updated_at": "2026-04-08T00:00:00Z", + "catalog_url": "https://...", + "integrations": { + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "my-org", + "repository": "https://github.com/my-org/speckit-my-agent", + "tags": ["cli"] + } + } +} +``` + +### Required Fields + +| Field | Type | Description | +|-------|------|-------------| +| `schema_version` | string | Must be `"1.0"` | +| `updated_at` | string | ISO 8601 timestamp | +| `integrations` | object | Map of integration ID → metadata | + +### Integration Entry Fields + +| Field | Type | Required | Description | +|-------|------|----------|-------------| +| `id` | string 
| Yes | Unique ID (lowercase alphanumeric + hyphens) | +| `name` | string | Yes | Human-readable display name | +| `version` | string | Yes | Semantic version | +| `description` | string | Yes | One-line description | +| `author` | string | No | Author name or organization | +| `repository` | string | No | Source repository URL | +| `tags` | array | No | Searchable tags (e.g., `["cli", "ide"]`) | + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) for how to add integrations to the community catalog. diff --git a/integrations/catalog.community.json b/integrations/catalog.community.json new file mode 100644 index 0000000000..47eb6d550d --- /dev/null +++ b/integrations/catalog.community.json @@ -0,0 +1,6 @@ +{ + "schema_version": "1.0", + "updated_at": "2026-04-08T00:00:00Z", + "catalog_url": "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.community.json", + "integrations": {} +} diff --git a/integrations/catalog.json b/integrations/catalog.json new file mode 100644 index 0000000000..17955ef002 --- /dev/null +++ b/integrations/catalog.json @@ -0,0 +1,250 @@ +{ + "schema_version": "1.0", + "updated_at": "2026-04-08T00:00:00Z", + "catalog_url": "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.json", + "integrations": { + "claude": { + "id": "claude", + "name": "Claude Code", + "version": "1.0.0", + "description": "Anthropic Claude Code CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "anthropic"] + }, + "copilot": { + "id": "copilot", + "name": "GitHub Copilot", + "version": "1.0.0", + "description": "GitHub Copilot IDE integration with agent commands and prompt files", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide", "github"] + }, + "gemini": { + "id": "gemini", + "name": "Gemini CLI", + "version": "1.0.0", + "description": "Google Gemini CLI integration with TOML command format", + 
"author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "google"] + }, + "cursor-agent": { + "id": "cursor-agent", + "name": "Cursor", + "version": "1.0.0", + "description": "Cursor IDE integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "windsurf": { + "id": "windsurf", + "name": "Windsurf", + "version": "1.0.0", + "description": "Windsurf IDE workflow integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "amp": { + "id": "amp", + "name": "Amp", + "version": "1.0.0", + "description": "Amp CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "codex": { + "id": "codex", + "name": "Codex CLI", + "version": "1.0.0", + "description": "Codex CLI skills-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "skills"] + }, + "qwen": { + "id": "qwen", + "name": "Qwen Code", + "version": "1.0.0", + "description": "Alibaba Qwen Code CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "alibaba"] + }, + "opencode": { + "id": "opencode", + "name": "opencode", + "version": "1.0.0", + "description": "opencode CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "forge": { + "id": "forge", + "name": "Forge", + "version": "1.0.0", + "description": "Forge CLI integration with parameter-based commands", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "kiro-cli": { + "id": "kiro-cli", + "name": "Kiro CLI", + "version": "1.0.0", + "description": "Kiro CLI prompt-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": 
["cli"] + }, + "junie": { + "id": "junie", + "name": "Junie", + "version": "1.0.0", + "description": "Junie by JetBrains CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "jetbrains"] + }, + "auggie": { + "id": "auggie", + "name": "Auggie CLI", + "version": "1.0.0", + "description": "Auggie CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "shai": { + "id": "shai", + "name": "SHAI", + "version": "1.0.0", + "description": "SHAI CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "tabnine": { + "id": "tabnine", + "name": "Tabnine CLI", + "version": "1.0.0", + "description": "Tabnine CLI integration with TOML command format", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "kilocode": { + "id": "kilocode", + "name": "Kilo Code", + "version": "1.0.0", + "description": "Kilo Code IDE workflow integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "roo": { + "id": "roo", + "name": "Roo Code", + "version": "1.0.0", + "description": "Roo Code IDE integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "bob": { + "id": "bob", + "name": "IBM Bob", + "version": "1.0.0", + "description": "IBM Bob IDE integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide", "ibm"] + }, + "trae": { + "id": "trae", + "name": "Trae", + "version": "1.0.0", + "description": "Trae IDE rules-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "codebuddy": { + "id": "codebuddy", + "name": "CodeBuddy", + "version": "1.0.0", + "description": "CodeBuddy CLI 
integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "qodercli": { + "id": "qodercli", + "name": "Qoder CLI", + "version": "1.0.0", + "description": "Qoder CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "kimi": { + "id": "kimi", + "name": "Kimi Code", + "version": "1.0.0", + "description": "Kimi Code CLI skills-based integration by Moonshot AI", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "skills"] + }, + "pi": { + "id": "pi", + "name": "Pi Coding Agent", + "version": "1.0.0", + "description": "Pi terminal coding agent prompt-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "iflow": { + "id": "iflow", + "name": "iFlow CLI", + "version": "1.0.0", + "description": "iFlow CLI integration by iflow-ai", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "vibe": { + "id": "vibe", + "name": "Mistral Vibe", + "version": "1.0.0", + "description": "Mistral Vibe CLI prompt-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "mistral"] + }, + "agy": { + "id": "agy", + "name": "Antigravity", + "version": "1.0.0", + "description": "Antigravity IDE skills-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide", "skills"] + }, + "generic": { + "id": "generic", + "name": "Generic (bring your own agent)", + "version": "1.0.0", + "description": "Generic integration for any agent via --ai-commands-dir", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["generic"] + } + } +} diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 11b6e0eda5..0494acd58b 
100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -1624,7 +1624,9 @@ def _resolve_script_type(project_root: Path, script_type: str | None) -> str: @integration_app.command("list") -def integration_list(): +def integration_list( + catalog: bool = typer.Option(False, "--catalog", help="Browse full catalog (built-in + community)"), +): """List available integrations and installed status.""" from .integrations import INTEGRATION_REGISTRY @@ -1639,6 +1641,47 @@ def integration_list(): current = _read_integration_json(project_root) installed_key = current.get("integration") + if catalog: + from .integrations.catalog import IntegrationCatalog, IntegrationCatalogError + + ic = IntegrationCatalog(project_root) + try: + entries = ic.search() + except IntegrationCatalogError as exc: + console.print(f"[red]Error:[/red] {exc}") + raise typer.Exit(1) + + if not entries: + console.print("[yellow]No integrations found in catalog.[/yellow]") + return + + table = Table(title="Integration Catalog (built-in + community)") + table.add_column("ID", style="cyan") + table.add_column("Name") + table.add_column("Version") + table.add_column("Source") + table.add_column("Status") + + for entry in sorted(entries, key=lambda e: e["id"]): + eid = entry["id"] + cat_name = entry.get("_catalog_name", "") + if eid == installed_key: + status = "[green]installed[/green]" + elif eid in INTEGRATION_REGISTRY: + status = "built-in" + else: + status = "" + table.add_row( + eid, + entry.get("name", eid), + entry.get("version", ""), + cat_name, + status, + ) + + console.print(table) + return + table = Table(title="AI Agent Integrations") table.add_column("Key", style="cyan") table.add_column("Name") @@ -2025,6 +2068,111 @@ def integration_switch( console.print(f"\n[green]✓[/green] Switched to integration '{name}'") +@integration_app.command("upgrade") +def integration_upgrade( + key: str = typer.Argument(None, help="Integration key to upgrade (default: current integration)"), + 
force: bool = typer.Option(False, "--force", help="Force upgrade even if files are modified"), + script: str | None = typer.Option(None, "--script", help="Script type: sh or ps (default: from init-options.json or platform default)"), + integration_options: str | None = typer.Option(None, "--integration-options", help="Options for the integration"), +): + """Upgrade an integration by reinstalling with diff-aware file handling. + + Compares manifest hashes to detect locally modified files and + preserves them unless --force is used. + """ + from .integrations import get_integration + from .integrations.manifest import IntegrationManifest + + project_root = Path.cwd() + + specify_dir = project_root / ".specify" + if not specify_dir.exists(): + console.print("[red]Error:[/red] Not a spec-kit project (no .specify/ directory)") + console.print("Run this command from a spec-kit project root") + raise typer.Exit(1) + + current = _read_integration_json(project_root) + installed_key = current.get("integration") + + if key is None: + if not installed_key: + console.print("[yellow]No integration is currently installed.[/yellow]") + raise typer.Exit(0) + key = installed_key + + if installed_key and installed_key != key: + console.print( + f"[red]Error:[/red] Integration '{key}' is not the currently installed integration ('{installed_key}')." + ) + console.print(f"Use [cyan]specify integration switch {key}[/cyan] instead.") + raise typer.Exit(1) + + integration = get_integration(key) + if integration is None: + console.print(f"[red]Error:[/red] Unknown integration '{key}'") + raise typer.Exit(1) + + manifest_path = project_root / ".specify" / "integrations" / f"{key}.manifest.json" + if not manifest_path.exists(): + console.print(f"[yellow]No manifest found for integration '{key}'. 
Nothing to upgrade.[/yellow]") + console.print(f"Run [cyan]specify integration install {key}[/cyan] to perform a fresh install.") + raise typer.Exit(0) + + try: + old_manifest = IntegrationManifest.load(key, project_root) + except (ValueError, FileNotFoundError) as exc: + console.print(f"[red]Error:[/red] Integration manifest for '{key}' is unreadable: {exc}") + raise typer.Exit(1) + + # Detect modified files via manifest hashes + modified = old_manifest.check_modified() + if modified and not force: + console.print(f"[yellow]⚠[/yellow] {len(modified)} file(s) have been modified since installation:") + for rel in modified: + console.print(f" {rel}") + console.print("\nUse [cyan]--force[/cyan] to overwrite modified files, or resolve manually.") + raise typer.Exit(1) + + selected_script = _resolve_script_type(project_root, script) + + # Phase 1: Teardown old files + console.print(f"Upgrading integration: [cyan]{key}[/cyan]") + removed, skipped = old_manifest.uninstall(project_root, force=force) + if removed: + console.print(f" Removed {len(removed)} old file(s)") + if skipped: + console.print(f" [yellow]Preserved {len(skipped)} modified file(s)[/yellow]") + + # Phase 2: Reinstall + new_manifest = IntegrationManifest(key, project_root, version=get_speckit_version()) + + parsed_options: dict[str, Any] | None = None + if integration_options: + parsed_options = _parse_integration_options(integration, integration_options) + + try: + integration.setup( + project_root, + new_manifest, + parsed_options=parsed_options, + script_type=selected_script, + raw_options=integration_options, + ) + new_manifest.save() + _write_integration_json(project_root, key, selected_script) + _update_init_options_for_integration(project_root, integration, script_type=selected_script) + except Exception as exc: + try: + integration.teardown(project_root, new_manifest, force=True) + except Exception: + pass + console.print(f"[red]Error:[/red] Failed to upgrade integration: {exc}") + raise 
typer.Exit(1) + + name = (integration.config or {}).get("name", key) + console.print(f"\n[green]✓[/green] Integration '{name}' upgraded successfully") + + # ===== Preset Commands ===== diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py new file mode 100644 index 0000000000..02a4a480cb --- /dev/null +++ b/src/specify_cli/integrations/catalog.py @@ -0,0 +1,516 @@ +"""Integration catalog — discovery, validation, and upgrade support. + +Provides: +- ``IntegrationCatalogEntry`` — single catalog source metadata. +- ``IntegrationCatalog`` — fetches, caches, and searches integration + catalogs (built-in + community). +- ``IntegrationDescriptor`` — loads and validates ``integration.yml``. +""" + +from __future__ import annotations + +import hashlib +import json +import os +import re +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, Dict, List, Optional + +import yaml +from packaging import version as pkg_version + + +# --------------------------------------------------------------------------- +# Errors +# --------------------------------------------------------------------------- + +class IntegrationCatalogError(Exception): + """Raised when a catalog operation fails.""" + + +class IntegrationDescriptorError(Exception): + """Raised when an integration.yml descriptor is invalid.""" + + +# --------------------------------------------------------------------------- +# IntegrationCatalogEntry +# --------------------------------------------------------------------------- + +@dataclass +class IntegrationCatalogEntry: + """Represents a single catalog source in the catalog stack.""" + + url: str + name: str + priority: int + install_allowed: bool + description: str = "" + + +# --------------------------------------------------------------------------- +# IntegrationCatalog +# --------------------------------------------------------------------------- + +class 
IntegrationCatalog: + """Manages integration catalog fetching, caching, and searching.""" + + DEFAULT_CATALOG_URL = ( + "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.json" + ) + COMMUNITY_CATALOG_URL = ( + "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.community.json" + ) + CACHE_DURATION = 3600 # 1 hour + + def __init__(self, project_root: Path) -> None: + self.project_root = project_root + self.cache_dir = project_root / ".specify" / "integrations" / ".cache" + self.cache_file = self.cache_dir / "catalog.json" + self.cache_metadata_file = self.cache_dir / "catalog-metadata.json" + + # -- URL validation --------------------------------------------------- + + @staticmethod + def _validate_catalog_url(url: str) -> None: + from urllib.parse import urlparse + + parsed = urlparse(url) + is_localhost = parsed.hostname in ("localhost", "127.0.0.1", "::1") + if parsed.scheme != "https" and not (parsed.scheme == "http" and is_localhost): + raise IntegrationCatalogError( + f"Catalog URL must use HTTPS (got {parsed.scheme}://). " + "HTTP is only allowed for localhost." + ) + if not parsed.netloc: + raise IntegrationCatalogError( + "Catalog URL must be a valid URL with a host." + ) + + # -- Catalog stack ---------------------------------------------------- + + def _load_catalog_config( + self, config_path: Path + ) -> Optional[List[IntegrationCatalogEntry]]: + """Load catalog stack from a YAML file. + + Returns None when the file does not exist. 
+ + Raises: + IntegrationCatalogError: on invalid content + """ + if not config_path.exists(): + return None + try: + data = yaml.safe_load(config_path.read_text(encoding="utf-8")) or {} + except (yaml.YAMLError, OSError, UnicodeError) as exc: + raise IntegrationCatalogError( + f"Failed to read catalog config {config_path}: {exc}" + ) + catalogs_data = data.get("catalogs", []) + if not catalogs_data: + raise IntegrationCatalogError( + f"Catalog config {config_path} exists but contains no 'catalogs' entries. " + f"Remove the file to use built-in defaults, or add valid catalog entries." + ) + if not isinstance(catalogs_data, list): + raise IntegrationCatalogError( + f"Invalid catalog config: 'catalogs' must be a list, " + f"got {type(catalogs_data).__name__}" + ) + entries: List[IntegrationCatalogEntry] = [] + skipped: List[int] = [] + for idx, item in enumerate(catalogs_data): + if not isinstance(item, dict): + raise IntegrationCatalogError( + f"Invalid catalog entry at index {idx}: " + f"expected a mapping, got {type(item).__name__}" + ) + url = str(item.get("url", "")).strip() + if not url: + skipped.append(idx) + continue + self._validate_catalog_url(url) + try: + priority = int(item.get("priority", idx + 1)) + except (TypeError, ValueError): + raise IntegrationCatalogError( + f"Invalid priority for catalog '{item.get('name', idx + 1)}': " + f"expected integer, got {item.get('priority')!r}" + ) + raw_install = item.get("install_allowed", False) + if isinstance(raw_install, str): + install_allowed = raw_install.strip().lower() in ("true", "yes", "1") + else: + install_allowed = bool(raw_install) + entries.append( + IntegrationCatalogEntry( + url=url, + name=str(item.get("name", f"catalog-{idx + 1}")), + priority=priority, + install_allowed=install_allowed, + description=str(item.get("description", "")), + ) + ) + entries.sort(key=lambda e: e.priority) + if not entries: + raise IntegrationCatalogError( + f"Catalog config {config_path} contains {len(catalogs_data)} 
" + f"entries but none have valid URLs (entries at indices {skipped} " + f"were skipped). Each catalog entry must have a 'url' field." + ) + return entries + + def get_active_catalogs(self) -> List[IntegrationCatalogEntry]: + """Return the ordered list of active integration catalogs. + + Resolution: + 1. ``SPECKIT_INTEGRATION_CATALOG_URL`` env var + 2. Project ```.specify/integration-catalogs.yml``` + 3. User ``~/.specify/integration-catalogs.yml`` + 4. Built-in defaults (built-in + community) + """ + import sys + + env_value = os.environ.get("SPECKIT_INTEGRATION_CATALOG_URL", "").strip() + if env_value: + self._validate_catalog_url(env_value) + if env_value != self.DEFAULT_CATALOG_URL: + print( + "Warning: Using non-default integration catalog. " + "Only use catalogs from sources you trust.", + file=sys.stderr, + ) + return [ + IntegrationCatalogEntry( + url=env_value, + name="custom", + priority=1, + install_allowed=True, + description="Custom catalog via SPECKIT_INTEGRATION_CATALOG_URL", + ) + ] + + project_cfg = self.project_root / ".specify" / "integration-catalogs.yml" + catalogs = self._load_catalog_config(project_cfg) + if catalogs is not None: + return catalogs + + user_cfg = Path.home() / ".specify" / "integration-catalogs.yml" + catalogs = self._load_catalog_config(user_cfg) + if catalogs is not None: + return catalogs + + return [ + IntegrationCatalogEntry( + url=self.DEFAULT_CATALOG_URL, + name="default", + priority=1, + install_allowed=True, + description="Built-in catalog of installable integrations", + ), + IntegrationCatalogEntry( + url=self.COMMUNITY_CATALOG_URL, + name="community", + priority=2, + install_allowed=False, + description="Community-contributed integrations (discovery only)", + ), + ] + + # -- Fetching --------------------------------------------------------- + + def _fetch_single_catalog( + self, + entry: IntegrationCatalogEntry, + force_refresh: bool = False, + ) -> Dict[str, Any]: + """Fetch one catalog, with per-URL caching.""" + 
import urllib.error + import urllib.request + + url_hash = hashlib.sha256(entry.url.encode()).hexdigest()[:16] + cache_file = self.cache_dir / f"catalog-{url_hash}.json" + cache_meta = self.cache_dir / f"catalog-{url_hash}-metadata.json" + + if not force_refresh and cache_file.exists() and cache_meta.exists(): + try: + meta = json.loads(cache_meta.read_text()) + cached_at = datetime.fromisoformat(meta.get("cached_at", "")) + if cached_at.tzinfo is None: + cached_at = cached_at.replace(tzinfo=timezone.utc) + age = (datetime.now(timezone.utc) - cached_at).total_seconds() + if age < self.CACHE_DURATION: + return json.loads(cache_file.read_text()) + except (json.JSONDecodeError, ValueError, KeyError, TypeError): + pass + + try: + with urllib.request.urlopen(entry.url, timeout=10) as resp: + catalog_data = json.loads(resp.read()) + + if ( + "schema_version" not in catalog_data + or "integrations" not in catalog_data + ): + raise IntegrationCatalogError( + f"Invalid catalog format from {entry.url}" + ) + + self.cache_dir.mkdir(parents=True, exist_ok=True) + cache_file.write_text(json.dumps(catalog_data, indent=2)) + cache_meta.write_text( + json.dumps( + { + "cached_at": datetime.now(timezone.utc).isoformat(), + "catalog_url": entry.url, + }, + indent=2, + ) + ) + return catalog_data + + except urllib.error.URLError as exc: + raise IntegrationCatalogError( + f"Failed to fetch catalog from {entry.url}: {exc}" + ) + except json.JSONDecodeError as exc: + raise IntegrationCatalogError( + f"Invalid JSON in catalog from {entry.url}: {exc}" + ) + + def _get_merged_integrations( + self, force_refresh: bool = False + ) -> List[Dict[str, Any]]: + """Fetch and merge integrations from all active catalogs. + + Higher-priority catalogs win on conflicts. Each dict is annotated + with ``_catalog_name`` and ``_install_allowed``. 
+ """ + import sys + + active = self.get_active_catalogs() + merged: Dict[str, Dict[str, Any]] = {} + any_success = False + + for entry in active: + try: + data = self._fetch_single_catalog(entry, force_refresh) + any_success = True + except IntegrationCatalogError as exc: + print( + f"Warning: Could not fetch catalog '{entry.name}': {exc}", + file=sys.stderr, + ) + continue + + for integ_id, integ_data in data.get("integrations", {}).items(): + if integ_id not in merged: + merged[integ_id] = { + **integ_data, + "id": integ_id, + "_catalog_name": entry.name, + "_install_allowed": entry.install_allowed, + } + + if not any_success and active: + raise IntegrationCatalogError( + "Failed to fetch any integration catalog" + ) + + return list(merged.values()) + + # -- Search / info ---------------------------------------------------- + + def search( + self, + query: Optional[str] = None, + tag: Optional[str] = None, + author: Optional[str] = None, + ) -> List[Dict[str, Any]]: + """Search catalogs for integrations matching the given filters.""" + results: List[Dict[str, Any]] = [] + for item in self._get_merged_integrations(): + if author and item.get("author", "").lower() != author.lower(): + continue + if tag and tag.lower() not in [ + t.lower() for t in item.get("tags", []) + ]: + continue + if query: + haystack = " ".join( + [ + item.get("name", ""), + item.get("description", ""), + item.get("id", ""), + ] + + item.get("tags", []) + ).lower() + if query.lower() not in haystack: + continue + results.append(item) + return results + + def get_integration_info( + self, integration_id: str + ) -> Optional[Dict[str, Any]]: + """Return catalog metadata for a single integration, or None.""" + for item in self._get_merged_integrations(): + if item["id"] == integration_id: + return item + return None + + # -- Cache management ------------------------------------------------- + + def clear_cache(self) -> None: + """Remove all cached catalog files.""" + if self.cache_dir.exists(): 
+ for f in self.cache_dir.glob("catalog-*.json"): + f.unlink(missing_ok=True) + + +# --------------------------------------------------------------------------- +# IntegrationDescriptor (integration.yml) +# --------------------------------------------------------------------------- + +class IntegrationDescriptor: + """Loads and validates an ``integration.yml`` descriptor. + + The descriptor mirrors ``extension.yml`` and ``preset.yml``:: + + schema_version: "1.0" + integration: + id: "my-agent" + name: "My Agent" + version: "1.0.0" + description: "Integration for My Agent" + author: "my-org" + requires: + speckit_version: ">=0.6.0" + tools: [...] + provides: + commands: [...] + scripts: [...] + """ + + SCHEMA_VERSION = "1.0" + REQUIRED_TOP_LEVEL = ["schema_version", "integration", "requires", "provides"] + + def __init__(self, descriptor_path: Path) -> None: + self.path = descriptor_path + self.data = self._load(descriptor_path) + self._validate() + + # -- Loading ---------------------------------------------------------- + + @staticmethod + def _load(path: Path) -> dict: + try: + with open(path, "r", encoding="utf-8") as fh: + return yaml.safe_load(fh) or {} + except yaml.YAMLError as exc: + raise IntegrationDescriptorError(f"Invalid YAML in {path}: {exc}") + except FileNotFoundError: + raise IntegrationDescriptorError(f"Descriptor not found: {path}") + + # -- Validation ------------------------------------------------------- + + def _validate(self) -> None: + for field in self.REQUIRED_TOP_LEVEL: + if field not in self.data: + raise IntegrationDescriptorError( + f"Missing required field: {field}" + ) + + if self.data["schema_version"] != self.SCHEMA_VERSION: + raise IntegrationDescriptorError( + f"Unsupported schema version: {self.data['schema_version']} " + f"(expected {self.SCHEMA_VERSION})" + ) + + integ = self.data["integration"] + for field in ("id", "name", "version", "description"): + if field not in integ: + raise IntegrationDescriptorError( + f"Missing 
integration.{field}" + ) + + if not re.match(r"^[a-z0-9-]+$", integ["id"]): + raise IntegrationDescriptorError( + f"Invalid integration ID '{integ['id']}': " + "must be lowercase alphanumeric with hyphens only" + ) + + try: + pkg_version.Version(integ["version"]) + except pkg_version.InvalidVersion: + raise IntegrationDescriptorError( + f"Invalid version: {integ['version']}" + ) + + requires = self.data["requires"] + if "speckit_version" not in requires: + raise IntegrationDescriptorError( + "Missing requires.speckit_version" + ) + + provides = self.data["provides"] + commands = provides.get("commands", []) + scripts = provides.get("scripts", []) + if "commands" in provides and not isinstance(commands, list): + raise IntegrationDescriptorError( + "Invalid provides.commands: expected a list" + ) + if "scripts" in provides and not isinstance(scripts, list): + raise IntegrationDescriptorError( + "Invalid provides.scripts: expected a list" + ) + if not commands and not scripts: + raise IntegrationDescriptorError( + "Integration must provide at least one command or script" + ) + for cmd in commands: + if "name" not in cmd or "file" not in cmd: + raise IntegrationDescriptorError( + "Command entry missing 'name' or 'file'" + ) + + # -- Property accessors ----------------------------------------------- + + @property + def id(self) -> str: + return self.data["integration"]["id"] + + @property + def name(self) -> str: + return self.data["integration"]["name"] + + @property + def version(self) -> str: + return self.data["integration"]["version"] + + @property + def description(self) -> str: + return self.data["integration"]["description"] + + @property + def requires_speckit_version(self) -> str: + return self.data["requires"]["speckit_version"] + + @property + def commands(self) -> List[Dict[str, Any]]: + return self.data.get("provides", {}).get("commands", []) + + @property + def scripts(self) -> List[str]: + return self.data.get("provides", {}).get("scripts", []) + + 
@property + def tools(self) -> List[Dict[str, Any]]: + return self.data.get("requires", {}).get("tools", []) + + def get_hash(self) -> str: + """SHA-256 hash of the descriptor file.""" + with open(self.path, "rb") as fh: + return f"sha256:{hashlib.sha256(fh.read()).hexdigest()}" diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py new file mode 100644 index 0000000000..2049e7c7ff --- /dev/null +++ b/tests/integrations/test_integration_catalog.py @@ -0,0 +1,642 @@ +"""Tests for the integration catalog system (catalog.py).""" + +import json +import os +from pathlib import Path + +import pytest +import yaml + +from specify_cli.integrations.catalog import ( + IntegrationCatalog, + IntegrationCatalogEntry, + IntegrationCatalogError, + IntegrationDescriptor, + IntegrationDescriptorError, +) + + +# --------------------------------------------------------------------------- +# IntegrationCatalogEntry +# --------------------------------------------------------------------------- + + +class TestIntegrationCatalogEntry: + def test_create_entry(self): + entry = IntegrationCatalogEntry( + url="https://example.com/catalog.json", + name="test", + priority=1, + install_allowed=True, + description="Test catalog", + ) + assert entry.url == "https://example.com/catalog.json" + assert entry.name == "test" + assert entry.priority == 1 + assert entry.install_allowed is True + assert entry.description == "Test catalog" + + def test_default_description(self): + entry = IntegrationCatalogEntry( + url="https://example.com/catalog.json", + name="test", + priority=1, + install_allowed=False, + ) + assert entry.description == "" + + +# --------------------------------------------------------------------------- +# IntegrationCatalog — URL validation +# --------------------------------------------------------------------------- + + +class TestCatalogURLValidation: + def test_https_allowed(self): + 
IntegrationCatalog._validate_catalog_url("https://example.com/catalog.json") + + def test_http_rejected(self): + with pytest.raises(IntegrationCatalogError, match="HTTPS"): + IntegrationCatalog._validate_catalog_url("http://example.com/catalog.json") + + def test_http_localhost_allowed(self): + IntegrationCatalog._validate_catalog_url("http://localhost:8080/catalog.json") + IntegrationCatalog._validate_catalog_url("http://127.0.0.1/catalog.json") + + def test_missing_host_rejected(self): + with pytest.raises(IntegrationCatalogError, match="valid URL"): + IntegrationCatalog._validate_catalog_url("https:///no-host") + + +# --------------------------------------------------------------------------- +# IntegrationCatalog — active catalogs +# --------------------------------------------------------------------------- + + +class TestActiveCatalogs: + def test_defaults_when_no_config(self, tmp_path): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + active = cat.get_active_catalogs() + assert len(active) == 2 + assert active[0].name == "default" + assert active[1].name == "community" + + def test_env_var_override(self, tmp_path, monkeypatch): + (tmp_path / ".specify").mkdir() + monkeypatch.setenv( + "SPECKIT_INTEGRATION_CATALOG_URL", + "https://custom.example.com/catalog.json", + ) + cat = IntegrationCatalog(tmp_path) + active = cat.get_active_catalogs() + assert len(active) == 1 + assert active[0].name == "custom" + + def test_project_config_overrides_defaults(self, tmp_path): + specify = tmp_path / ".specify" + specify.mkdir() + cfg = specify / "integration-catalogs.yml" + cfg.write_text(yaml.dump({ + "catalogs": [ + {"url": "https://my.example.com/cat.json", "name": "mine", "priority": 1, "install_allowed": True}, + ] + })) + cat = IntegrationCatalog(tmp_path) + active = cat.get_active_catalogs() + assert len(active) == 1 + assert active[0].name == "mine" + + def test_empty_config_raises(self, tmp_path): + specify = tmp_path / ".specify" + 
specify.mkdir() + cfg = specify / "integration-catalogs.yml" + cfg.write_text(yaml.dump({"catalogs": []})) + cat = IntegrationCatalog(tmp_path) + with pytest.raises(IntegrationCatalogError, match="no 'catalogs' entries"): + cat.get_active_catalogs() + + +# --------------------------------------------------------------------------- +# IntegrationCatalog — fetch & search (using local file:// catalog) +# --------------------------------------------------------------------------- + + +def _write_catalog(path: Path, integrations: dict) -> None: + """Helper: write a catalog JSON file.""" + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps({ + "schema_version": "1.0", + "updated_at": "2026-01-01T00:00:00Z", + "integrations": integrations, + }, indent=2)) + + +class TestCatalogFetch: + """Tests that use a local HTTP server stub via monkeypatch.""" + + def _patch_urlopen(self, monkeypatch, catalog_data): + """Patch urllib.request.urlopen to return *catalog_data*.""" + + class FakeResponse: + def __init__(self, data): + self._data = json.dumps(data).encode() + + def read(self): + return self._data + + def __enter__(self): + return self + + def __exit__(self, *a): + pass + + def fake_urlopen(url, timeout=10): + return FakeResponse(catalog_data) + + import urllib.request + monkeypatch.setattr(urllib.request, "urlopen", fake_urlopen) + + def test_fetch_and_search_all(self, tmp_path, monkeypatch): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + + catalog = { + "schema_version": "1.0", + "updated_at": "2026-01-01T00:00:00Z", + "integrations": { + "acme-coder": { + "id": "acme-coder", + "name": "Acme Coder", + "version": "2.0.0", + "description": "Community integration for Acme Coder", + "author": "acme-org", + "tags": ["cli"], + }, + }, + } + self._patch_urlopen(monkeypatch, catalog) + + results = cat.search() + assert len(results) >= 1 + ids = [r["id"] for r in results] + assert "acme-coder" in ids + + def 
test_search_by_tag(self, tmp_path, monkeypatch): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + + catalog = { + "schema_version": "1.0", + "updated_at": "2026-01-01T00:00:00Z", + "integrations": { + "a": {"id": "a", "name": "A", "version": "1.0.0", "tags": ["cli"]}, + "b": {"id": "b", "name": "B", "version": "1.0.0", "tags": ["ide"]}, + }, + } + self._patch_urlopen(monkeypatch, catalog) + + results = cat.search(tag="cli") + assert all("cli" in r.get("tags", []) for r in results) + + def test_search_by_query(self, tmp_path, monkeypatch): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + + catalog = { + "schema_version": "1.0", + "updated_at": "2026-01-01T00:00:00Z", + "integrations": { + "claude": {"id": "claude", "name": "Claude Code", "version": "1.0.0", "description": "Anthropic", "tags": []}, + "gemini": {"id": "gemini", "name": "Gemini CLI", "version": "1.0.0", "description": "Google", "tags": []}, + }, + } + self._patch_urlopen(monkeypatch, catalog) + + results = cat.search(query="claude") + assert len(results) == 1 + assert results[0]["id"] == "claude" + + def test_get_integration_info(self, tmp_path, monkeypatch): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + + catalog = { + "schema_version": "1.0", + "updated_at": "2026-01-01T00:00:00Z", + "integrations": { + "claude": {"id": "claude", "name": "Claude Code", "version": "1.0.0"}, + }, + } + self._patch_urlopen(monkeypatch, catalog) + + info = cat.get_integration_info("claude") + assert info is not None + assert info["name"] == "Claude Code" + + assert cat.get_integration_info("nonexistent") is None + + def test_invalid_catalog_format(self, tmp_path, monkeypatch): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + + self._patch_urlopen(monkeypatch, {"schema_version": "1.0"}) # missing "integrations" + + with pytest.raises(IntegrationCatalogError, match="Failed to fetch any integration catalog"): + cat.search() + 
+ def test_clear_cache(self, tmp_path): + (tmp_path / ".specify").mkdir() + cat = IntegrationCatalog(tmp_path) + cat.cache_dir.mkdir(parents=True, exist_ok=True) + (cat.cache_dir / "catalog-abc123.json").write_text("{}") + cat.clear_cache() + assert not list(cat.cache_dir.glob("catalog-*.json")) + + +# --------------------------------------------------------------------------- +# IntegrationDescriptor (integration.yml) +# --------------------------------------------------------------------------- + +VALID_DESCRIPTOR = { + "schema_version": "1.0", + "integration": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "my-org", + }, + "requires": { + "speckit_version": ">=0.6.0", + }, + "provides": { + "commands": [ + {"name": "speckit.specify", "file": "templates/speckit.specify.md"}, + ], + "scripts": ["update-context.sh"], + }, +} + + +class TestIntegrationDescriptor: + def _write(self, tmp_path, data): + p = tmp_path / "integration.yml" + p.write_text(yaml.dump(data)) + return p + + def test_valid_descriptor(self, tmp_path): + p = self._write(tmp_path, VALID_DESCRIPTOR) + desc = IntegrationDescriptor(p) + assert desc.id == "my-agent" + assert desc.name == "My Agent" + assert desc.version == "1.0.0" + assert desc.description == "Integration for My Agent" + assert desc.requires_speckit_version == ">=0.6.0" + assert len(desc.commands) == 1 + assert desc.scripts == ["update-context.sh"] + + def test_missing_schema_version(self, tmp_path): + data = {**VALID_DESCRIPTOR} + del data["schema_version"] + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="Missing required field: schema_version"): + IntegrationDescriptor(p) + + def test_unsupported_schema_version(self, tmp_path): + data = {**VALID_DESCRIPTOR, "schema_version": "99.0"} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="Unsupported schema version"): + 
IntegrationDescriptor(p) + + def test_missing_integration_id(self, tmp_path): + data = {**VALID_DESCRIPTOR, "integration": {"name": "X", "version": "1.0.0", "description": "Y"}} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="Missing integration.id"): + IntegrationDescriptor(p) + + def test_invalid_id_format(self, tmp_path): + integ = {**VALID_DESCRIPTOR["integration"], "id": "BAD_ID"} + data = {**VALID_DESCRIPTOR, "integration": integ} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="Invalid integration ID"): + IntegrationDescriptor(p) + + def test_invalid_version(self, tmp_path): + integ = {**VALID_DESCRIPTOR["integration"], "version": "not-semver"} + data = {**VALID_DESCRIPTOR, "integration": integ} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="Invalid version"): + IntegrationDescriptor(p) + + def test_missing_speckit_version(self, tmp_path): + data = {**VALID_DESCRIPTOR, "requires": {}} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="requires.speckit_version"): + IntegrationDescriptor(p) + + def test_no_commands_or_scripts(self, tmp_path): + data = {**VALID_DESCRIPTOR, "provides": {}} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="at least one command or script"): + IntegrationDescriptor(p) + + def test_command_missing_name(self, tmp_path): + data = {**VALID_DESCRIPTOR, "provides": {"commands": [{"file": "x.md"}]}} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="missing 'name' or 'file'"): + IntegrationDescriptor(p) + + def test_commands_not_a_list(self, tmp_path): + data = {**VALID_DESCRIPTOR, "provides": {"commands": "not-a-list", "scripts": ["a.sh"]}} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="expected a list"): + IntegrationDescriptor(p) + + def 
test_scripts_not_a_list(self, tmp_path): + data = {**VALID_DESCRIPTOR, "provides": {"commands": [{"name": "a", "file": "b"}], "scripts": "not-a-list"}} + p = self._write(tmp_path, data) + with pytest.raises(IntegrationDescriptorError, match="expected a list"): + IntegrationDescriptor(p) + + def test_file_not_found(self, tmp_path): + with pytest.raises(IntegrationDescriptorError, match="Descriptor not found"): + IntegrationDescriptor(tmp_path / "nonexistent.yml") + + def test_invalid_yaml(self, tmp_path): + p = tmp_path / "integration.yml" + p.write_text(": : :") + with pytest.raises(IntegrationDescriptorError, match="Invalid YAML"): + IntegrationDescriptor(p) + + def test_get_hash(self, tmp_path): + p = self._write(tmp_path, VALID_DESCRIPTOR) + desc = IntegrationDescriptor(p) + h = desc.get_hash() + assert h.startswith("sha256:") + + def test_tools_accessor(self, tmp_path): + data = {**VALID_DESCRIPTOR, "requires": { + "speckit_version": ">=0.6.0", + "tools": [{"name": "my-agent", "version": ">=1.0.0", "required": True}], + }} + p = self._write(tmp_path, data) + desc = IntegrationDescriptor(p) + assert len(desc.tools) == 1 + assert desc.tools[0]["name"] == "my-agent" + + +# --------------------------------------------------------------------------- +# CLI: integration list --catalog +# --------------------------------------------------------------------------- + + +class TestIntegrationListCatalog: + """Test ``specify integration list --catalog``.""" + + def _init_project(self, tmp_path): + """Create a minimal spec-kit project.""" + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = tmp_path / "proj" + project.mkdir() + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, [ + "init", "--here", + "--integration", "copilot", + "--script", "sh", + "--no-git", + "--ignore-agent-tools", + ], catch_exceptions=False) + finally: + os.chdir(old) + assert result.exit_code == 0, result.output + return 
project + + def test_list_catalog_flag(self, tmp_path, monkeypatch): + """--catalog should show catalog entries.""" + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = self._init_project(tmp_path) + + catalog = { + "schema_version": "1.0", + "updated_at": "2026-01-01T00:00:00Z", + "integrations": { + "test-agent": { + "id": "test-agent", + "name": "Test Agent", + "version": "1.0.0", + "description": "A test agent", + "tags": ["cli"], + }, + }, + } + + import urllib.request + + class FakeResponse: + def __init__(self, data): + self._data = json.dumps(data).encode() + def read(self): + return self._data + def __enter__(self): + return self + def __exit__(self, *a): + pass + + monkeypatch.setattr(urllib.request, "urlopen", lambda url, timeout=10: FakeResponse(catalog)) + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "list", "--catalog"]) + finally: + os.chdir(old) + + assert result.exit_code == 0 + assert "test-agent" in result.output + assert "Test Agent" in result.output + + def test_list_without_catalog_still_works(self, tmp_path): + """Default list (no --catalog) works as before.""" + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = self._init_project(tmp_path) + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "list"]) + finally: + os.chdir(old) + + assert result.exit_code == 0 + assert "copilot" in result.output + assert "installed" in result.output + + +# --------------------------------------------------------------------------- +# CLI: integration upgrade +# --------------------------------------------------------------------------- + + +class TestIntegrationUpgrade: + """Test ``specify integration upgrade``.""" + + def _init_project(self, tmp_path, integration="copilot"): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + 
project = tmp_path / "proj" + project.mkdir() + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, [ + "init", "--here", + "--integration", integration, + "--script", "sh", + "--no-git", + "--ignore-agent-tools", + ], catch_exceptions=False) + finally: + os.chdir(old) + assert result.exit_code == 0, result.output + return project + + def test_upgrade_requires_speckit_project(self, tmp_path): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + old = os.getcwd() + try: + os.chdir(tmp_path) + result = runner.invoke(app, ["integration", "upgrade"]) + finally: + os.chdir(old) + assert result.exit_code != 0 + assert "Not a spec-kit project" in result.output + + def test_upgrade_no_integration_installed(self, tmp_path): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = tmp_path / "proj" + project.mkdir() + (project / ".specify").mkdir() + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "upgrade"]) + finally: + os.chdir(old) + assert result.exit_code == 0 + assert "No integration is currently installed" in result.output + + def test_upgrade_succeeds(self, tmp_path): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = self._init_project(tmp_path, "copilot") + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "upgrade"], catch_exceptions=False) + finally: + os.chdir(old) + assert result.exit_code == 0 + assert "upgraded successfully" in result.output + + def test_upgrade_blocks_on_modified_files(self, tmp_path): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = self._init_project(tmp_path, "copilot") + + # Modify a tracked file so the manifest hash won't match + manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json" + assert manifest_path.exists(), 
"Manifest should exist after init" + manifest_data = json.loads(manifest_path.read_text()) + tracked_files = manifest_data.get("files", {}) + if tracked_files: + first_rel = next(iter(tracked_files)) + target_file = project / first_rel + if target_file.exists(): + target_file.write_text("MODIFIED CONTENT\n") + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "upgrade"]) + finally: + os.chdir(old) + assert result.exit_code != 0 + assert "modified" in result.output.lower() + + def test_upgrade_force_overwrites_modified(self, tmp_path): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = self._init_project(tmp_path, "copilot") + + # Modify a tracked file + manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json" + manifest_data = json.loads(manifest_path.read_text()) + tracked_files = manifest_data.get("files", {}) + if tracked_files: + first_rel = next(iter(tracked_files)) + target_file = project / first_rel + if target_file.exists(): + target_file.write_text("MODIFIED CONTENT\n") + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "upgrade", "--force"], catch_exceptions=False) + finally: + os.chdir(old) + assert result.exit_code == 0 + assert "upgraded successfully" in result.output + + def test_upgrade_wrong_integration_key(self, tmp_path): + from typer.testing import CliRunner + from specify_cli import app + runner = CliRunner() + project = self._init_project(tmp_path, "copilot") + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "upgrade", "claude"]) + finally: + os.chdir(old) + assert result.exit_code != 0 + assert "not the currently installed integration" in result.output + + def test_upgrade_no_manifest(self, tmp_path): + """Upgrade with missing manifest suggests fresh install.""" + from typer.testing import CliRunner + from specify_cli import app + runner = 
CliRunner() + project = self._init_project(tmp_path, "copilot") + + # Remove manifest + manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json" + if manifest_path.exists(): + manifest_path.unlink() + + old = os.getcwd() + try: + os.chdir(project) + result = runner.invoke(app, ["integration", "upgrade"]) + finally: + os.chdir(old) + assert result.exit_code == 0 + assert "Nothing to upgrade" in result.output From eeac3b2eda0a5d9546553581e0cf398f52a9266d Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 10:36:56 -0500 Subject: [PATCH 03/19] fix: address PR review feedback - Replace empty except with cache cleanup in _fetch_single_catalog - Log teardown failure warning instead of silent pass in upgrade - Validate catalog_data and integrations are dicts before use - Catch OSError/UnicodeError in IntegrationDescriptor._load - Add isinstance checks for integration/requires/provides/commands - Enforce semver (X.Y.Z) instead of PEP 440 for descriptor versions - Fix docstring and CONTRIBUTING.md to match actual block-on-modified behavior - Restore old manifest on upgrade failure for transactional safety --- integrations/CONTRIBUTING.md | 4 +-- src/specify_cli/__init__.py | 12 ++++++-- src/specify_cli/integrations/catalog.py | 39 +++++++++++++++++++++---- 3 files changed, 45 insertions(+), 10 deletions(-) diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md index 70944bbc75..f9e1d90bc1 100644 --- a/integrations/CONTRIBUTING.md +++ b/integrations/CONTRIBUTING.md @@ -123,8 +123,8 @@ The `specify integration upgrade` command supports diff-aware upgrades: 1. **Hash comparison** — the manifest records SHA-256 hashes of all installed files 2. **Modified file detection** — files changed since installation are flagged -3. **Safe default** — modified files are preserved unless `--force` is used -4. **Clean reinstall** — unmodified files are replaced with the latest version +3. 
**Safe default** — the upgrade blocks if any installed files were modified since installation +4. **Forced reinstall** — passing `--force` overwrites modified files with the latest version ```bash # Upgrade current integration (blocks if files are modified) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 289658cd9a..08f624ae5a 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2229,7 +2229,7 @@ def integration_upgrade( """Upgrade an integration by reinstalling with diff-aware file handling. Compares manifest hashes to detect locally modified files and - preserves them unless --force is used. + blocks the upgrade unless --force is used. """ from .integrations import get_integration from .integrations.manifest import IntegrationManifest @@ -2315,8 +2315,16 @@ def integration_upgrade( except Exception as exc: try: integration.teardown(project_root, new_manifest, force=True) + except Exception as teardown_exc: + console.print( + f"[yellow]Warning:[/yellow] Teardown during rollback also failed: {teardown_exc}" + ) + # Attempt to restore the old manifest so the project is not left broken + try: + old_manifest.save() + _write_integration_json(project_root, key, selected_script) except Exception: - pass + pass # Best-effort restoration; original error is more important console.print(f"[red]Error:[/red] Failed to upgrade integration: {exc}") raise typer.Exit(1) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 02a4a480cb..c5637548f9 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -19,7 +19,6 @@ from typing import Any, Dict, List, Optional import yaml -from packaging import version as pkg_version # --------------------------------------------------------------------------- @@ -244,12 +243,18 @@ def _fetch_single_catalog( if age < self.CACHE_DURATION: return json.loads(cache_file.read_text()) except 
(json.JSONDecodeError, ValueError, KeyError, TypeError): - pass + # Cache is invalid or stale metadata; delete and refetch from source. + cache_file.unlink(missing_ok=True) + cache_meta.unlink(missing_ok=True) try: with urllib.request.urlopen(entry.url, timeout=10) as resp: catalog_data = json.loads(resp.read()) + if not isinstance(catalog_data, dict): + raise IntegrationCatalogError( + f"Invalid catalog format from {entry.url}: expected a JSON object" + ) if ( "schema_version" not in catalog_data or "integrations" not in catalog_data @@ -257,6 +262,10 @@ def _fetch_single_catalog( raise IntegrationCatalogError( f"Invalid catalog format from {entry.url}" ) + if not isinstance(catalog_data.get("integrations"), dict): + raise IntegrationCatalogError( + f"Invalid catalog format from {entry.url}: 'integrations' must be a JSON object" + ) self.cache_dir.mkdir(parents=True, exist_ok=True) cache_file.write_text(json.dumps(catalog_data, indent=2)) @@ -413,6 +422,10 @@ def _load(path: Path) -> dict: raise IntegrationDescriptorError(f"Invalid YAML in {path}: {exc}") except FileNotFoundError: raise IntegrationDescriptorError(f"Descriptor not found: {path}") + except (OSError, UnicodeError) as exc: + raise IntegrationDescriptorError( + f"Unable to read descriptor {path}: {exc}" + ) # -- Validation ------------------------------------------------------- @@ -430,6 +443,10 @@ def _validate(self) -> None: ) integ = self.data["integration"] + if not isinstance(integ, dict): + raise IntegrationDescriptorError( + "'integration' must be a mapping" + ) for field in ("id", "name", "version", "description"): if field not in integ: raise IntegrationDescriptorError( @@ -442,20 +459,26 @@ def _validate(self) -> None: "must be lowercase alphanumeric with hyphens only" ) - try: - pkg_version.Version(integ["version"]) - except pkg_version.InvalidVersion: + if not re.match(r"^\d+\.\d+\.\d+", integ["version"]): raise IntegrationDescriptorError( - f"Invalid version: {integ['version']}" + 
f"Invalid version '{integ['version']}': must use semantic versioning (e.g., 1.0.0)" ) requires = self.data["requires"] + if not isinstance(requires, dict): + raise IntegrationDescriptorError( + "'requires' must be a mapping" + ) if "speckit_version" not in requires: raise IntegrationDescriptorError( "Missing requires.speckit_version" ) provides = self.data["provides"] + if not isinstance(provides, dict): + raise IntegrationDescriptorError( + "'provides' must be a mapping" + ) commands = provides.get("commands", []) scripts = provides.get("scripts", []) if "commands" in provides and not isinstance(commands, list): @@ -471,6 +494,10 @@ def _validate(self) -> None: "Integration must provide at least one command or script" ) for cmd in commands: + if not isinstance(cmd, dict): + raise IntegrationDescriptorError( + "Each command entry must be a mapping" + ) if "name" not in cmd or "file" not in cmd: raise IntegrationDescriptorError( "Command entry missing 'name' or 'file'" From b1a969479672cd4ebe5d6059fafe8eb58a0c5add Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 14:27:05 -0500 Subject: [PATCH 04/19] refactor: address second round of PR review feedback - Remove dead cache_file/cache_metadata_file attributes from IntegrationCatalog - Deduplicate non-default catalog warning (show once per process) - Anchor version regex to reject partial matches like 1.0.0beta - Fix 'Preserved modified' message to 'Skipped' for accuracy - Make upgrade transactional: install new files first, then remove stale old-only files, so a failed setup leaves old integration intact - Update CONTRIBUTING.md: speckit_version validates presence only --- integrations/CONTRIBUTING.md | 2 +- src/specify_cli/__init__.py | 39 ++++++++++++++++--------- src/specify_cli/integrations/catalog.py | 16 +++++----- 3 files changed, 34 insertions(+), 23 deletions(-) diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md index 
f9e1d90bc1..78b7824457 100644 --- a/integrations/CONTRIBUTING.md +++ b/integrations/CONTRIBUTING.md @@ -80,7 +80,7 @@ provides: | `schema_version` | Must be `"1.0"` | | `integration.id` | Lowercase alphanumeric + hyphens (`^[a-z0-9-]+$`) | | `integration.version` | Valid semantic version | -| `requires.speckit_version` | PEP 440 version specifier | +| `requires.speckit_version` | Required field; current validation checks presence only | | `provides` | Must include at least one command or script | | `provides.commands[].name` | String identifier | | `provides.commands[].file` | Relative path to template file | diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 08f624ae5a..2d8b7cd270 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2286,15 +2286,8 @@ def integration_upgrade( selected_script = _resolve_script_type(project_root, script) - # Phase 1: Teardown old files + # Phase 1: Install new files (overwrites existing; old-only files remain) console.print(f"Upgrading integration: [cyan]{key}[/cyan]") - removed, skipped = old_manifest.uninstall(project_root, force=force) - if removed: - console.print(f" Removed {len(removed)} old file(s)") - if skipped: - console.print(f" [yellow]Preserved {len(skipped)} modified file(s)[/yellow]") - - # Phase 2: Reinstall new_manifest = IntegrationManifest(key, project_root, version=get_speckit_version()) parsed_options: dict[str, Any] | None = None @@ -2319,15 +2312,33 @@ def integration_upgrade( console.print( f"[yellow]Warning:[/yellow] Teardown during rollback also failed: {teardown_exc}" ) - # Attempt to restore the old manifest so the project is not left broken - try: - old_manifest.save() - _write_integration_json(project_root, key, selected_script) - except Exception: - pass # Best-effort restoration; original error is more important console.print(f"[red]Error:[/red] Failed to upgrade integration: {exc}") raise typer.Exit(1) + # Phase 2: Remove stale files from old 
manifest that are not in the new one + old_files = set(old_manifest.files.keys()) + new_files = set(new_manifest.files.keys()) + stale_files = old_files - new_files + stale_removed = 0 + for rel in stale_files: + path = project_root / rel + if path.exists() and path.is_file(): + try: + path.unlink() + stale_removed += 1 + # Clean up empty parent directories up to project root + parent = path.parent + while parent != project_root: + try: + parent.rmdir() + except OSError: + break + parent = parent.parent + except OSError: + pass + if stale_removed: + console.print(f" Removed {stale_removed} stale file(s) from previous install") + name = (integration.config or {}).get("name", key) console.print(f"\n[green]✓[/green] Integration '{name}' upgraded successfully") diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index c5637548f9..1097c64e8f 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -66,8 +66,6 @@ class IntegrationCatalog: def __init__(self, project_root: Path) -> None: self.project_root = project_root self.cache_dir = project_root / ".specify" / "integrations" / ".cache" - self.cache_file = self.cache_dir / "catalog.json" - self.cache_metadata_file = self.cache_dir / "catalog-metadata.json" # -- URL validation --------------------------------------------------- @@ -176,11 +174,13 @@ def get_active_catalogs(self) -> List[IntegrationCatalogEntry]: if env_value: self._validate_catalog_url(env_value) if env_value != self.DEFAULT_CATALOG_URL: - print( - "Warning: Using non-default integration catalog. " - "Only use catalogs from sources you trust.", - file=sys.stderr, - ) + if not getattr(self, "_non_default_catalog_warning_shown", False): + print( + "Warning: Using non-default integration catalog. 
" + "Only use catalogs from sources you trust.", + file=sys.stderr, + ) + self._non_default_catalog_warning_shown = True return [ IntegrationCatalogEntry( url=env_value, @@ -459,7 +459,7 @@ def _validate(self) -> None: "must be lowercase alphanumeric with hyphens only" ) - if not re.match(r"^\d+\.\d+\.\d+", integ["version"]): + if not re.match(r"^\d+\.\d+\.\d+$", integ["version"]): raise IntegrationDescriptorError( f"Invalid version '{integ['version']}': must use semantic versioning (e.g., 1.0.0)" ) From 7891de179897198948ba74f894840c7ffe65361f Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 14:35:36 -0500 Subject: [PATCH 05/19] Potential fix for pull request finding 'Empty except' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- src/specify_cli/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 2d8b7cd270..cc5b7897ec 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2335,6 +2335,8 @@ def integration_upgrade( break parent = parent.parent except OSError: + # Best-effort cleanup: if a stale file cannot be removed (e.g. permission/race), + # continue upgrade flow without failing the command. 
pass if stale_removed: console.print(f" Removed {stale_removed} stale file(s) from previous install") From 8ca42b6cd27520a8fda114812cdaefccd316f835 Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 14:46:53 -0500 Subject: [PATCH 06/19] fix: address third round of PR review feedback - Fix CONTRIBUTING.md JSON examples to show full catalog structure with schema_version and integrations wrapper - Wrap cache writes in try/except OSError for read-only project dirs - Validate _load_catalog_config YAML root is a dict - Skip non-dict integ_data entries in merged catalog - Normalize tags to list-of-strings before filtering/searching - Add path traversal containment check for stale file deletion - Clarify docstring: lower numeric priority = higher precedence --- integrations/CONTRIBUTING.md | 42 ++++++++++++---------- src/specify_cli/__init__.py | 7 ++++ src/specify_cli/integrations/catalog.py | 48 ++++++++++++++++--------- 3 files changed, 62 insertions(+), 35 deletions(-) diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md index 78b7824457..11dec3d984 100644 --- a/integrations/CONTRIBUTING.md +++ b/integrations/CONTRIBUTING.md @@ -17,18 +17,21 @@ Built-in integrations are maintained by the Spec Kit core team and ship with the ### Catalog Entry Format -Add your integration to `integrations/catalog.json`: +Add your integration under the top-level `integrations` key in `integrations/catalog.json`: ```json { - "my-agent": { - "id": "my-agent", - "name": "My Agent", - "version": "1.0.0", - "description": "Integration for My Agent", - "author": "spec-kit-core", - "repository": "https://github.com/github/spec-kit", - "tags": ["cli"] + "schema_version": "1.0", + "integrations": { + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] 
+ } } } ``` @@ -88,18 +91,21 @@ provides: ### Submitting to the Community Catalog 1. **Fork** the [spec-kit repository](https://github.com/github/spec-kit) -2. **Add your entry** to `integrations/catalog.community.json`: +2. **Add your entry** under the `integrations` key in `integrations/catalog.community.json`: ```json { - "my-agent": { - "id": "my-agent", - "name": "My Agent", - "version": "1.0.0", - "description": "Integration for My Agent", - "author": "your-name", - "repository": "https://github.com/your-name/speckit-my-agent", - "tags": ["cli"] + "schema_version": "1.0", + "integrations": { + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "your-name", + "repository": "https://github.com/your-name/speckit-my-agent", + "tags": ["cli"] + } } } ``` diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index cc5b7897ec..30b97a384d 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2316,12 +2316,19 @@ def integration_upgrade( raise typer.Exit(1) # Phase 2: Remove stale files from old manifest that are not in the new one + resolved_project_root = project_root.resolve() old_files = set(old_manifest.files.keys()) new_files = set(new_manifest.files.keys()) stale_files = old_files - new_files stale_removed = 0 for rel in stale_files: path = project_root / rel + # Validate containment to prevent path traversal from tampered manifests + try: + normed = Path(os.path.normpath(path)) + normed.relative_to(resolved_project_root) + except (ValueError, OSError): + continue if path.exists() and path.is_file(): try: path.unlink() diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 1097c64e8f..704b7c61e6 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -105,6 +105,10 @@ def _load_catalog_config( raise IntegrationCatalogError( f"Failed to read catalog 
config {config_path}: {exc}" ) + if not isinstance(data, dict): + raise IntegrationCatalogError( + f"Invalid catalog config {config_path}: expected a YAML mapping at the root" + ) catalogs_data = data.get("catalogs", []) if not catalogs_data: raise IntegrationCatalogError( @@ -267,17 +271,20 @@ def _fetch_single_catalog( f"Invalid catalog format from {entry.url}: 'integrations' must be a JSON object" ) - self.cache_dir.mkdir(parents=True, exist_ok=True) - cache_file.write_text(json.dumps(catalog_data, indent=2)) - cache_meta.write_text( - json.dumps( - { - "cached_at": datetime.now(timezone.utc).isoformat(), - "catalog_url": entry.url, - }, - indent=2, + try: + self.cache_dir.mkdir(parents=True, exist_ok=True) + cache_file.write_text(json.dumps(catalog_data, indent=2)) + cache_meta.write_text( + json.dumps( + { + "cached_at": datetime.now(timezone.utc).isoformat(), + "catalog_url": entry.url, + }, + indent=2, + ) ) - ) + except OSError: + pass # Cache is best-effort; proceed with fetched data return catalog_data except urllib.error.URLError as exc: @@ -294,8 +301,10 @@ def _get_merged_integrations( ) -> List[Dict[str, Any]]: """Fetch and merge integrations from all active catalogs. - Higher-priority catalogs win on conflicts. Each dict is annotated - with ``_catalog_name`` and ``_install_allowed``. + Catalogs are processed in the order returned by + :meth:`get_active_catalogs`. On conflicts, the first catalog in that + order wins (lower numeric priority = higher precedence). Each dict is + annotated with ``_catalog_name`` and ``_install_allowed``. 
""" import sys @@ -315,6 +324,8 @@ def _get_merged_integrations( continue for integ_id, integ_data in data.get("integrations", {}).items(): + if not isinstance(integ_data, dict): + continue if integ_id not in merged: merged[integ_id] = { **integ_data, @@ -343,18 +354,21 @@ def search( for item in self._get_merged_integrations(): if author and item.get("author", "").lower() != author.lower(): continue - if tag and tag.lower() not in [ - t.lower() for t in item.get("tags", []) - ]: - continue + if tag: + raw_tags = item.get("tags", []) + tags_list = raw_tags if isinstance(raw_tags, list) else [] + if tag.lower() not in [t.lower() for t in tags_list if isinstance(t, str)]: + continue if query: + raw_tags = item.get("tags", []) + tags_list = raw_tags if isinstance(raw_tags, list) else [] haystack = " ".join( [ item.get("name", ""), item.get("description", ""), item.get("id", ""), ] - + item.get("tags", []) + + [t for t in tags_list if isinstance(t, str)] ).lower() if query.lower() not in haystack: continue From 20e5f9724ee1a8fc23c0f60f38e3e7b2385d3c11 Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 15:07:35 -0500 Subject: [PATCH 07/19] fix: address fourth round of PR review feedback - Remove unused _write_catalog helper from test file - Fix comment: tests use monkeypatched urlopen, not file:// URLs - Wrap cache unlink calls in OSError handler - Add explicit encoding='utf-8' to all cache read_text/write_text calls - Restore packaging.version.Version for descriptor version validation to align with extension/preset validators - Add missing goose entry to integrations/catalog.json --- integrations/catalog.json | 9 ++++++++ src/specify_cli/integrations/catalog.py | 23 ++++++++++++------- .../integrations/test_integration_catalog.py | 12 +--------- 3 files changed, 25 insertions(+), 19 deletions(-) diff --git a/integrations/catalog.json b/integrations/catalog.json index 17955ef002..3df96b8789 100644 --- 
a/integrations/catalog.json +++ b/integrations/catalog.json @@ -245,6 +245,15 @@ "author": "spec-kit-core", "repository": "https://github.com/github/spec-kit", "tags": ["generic"] + }, + "goose": { + "id": "goose", + "name": "Goose", + "version": "1.0.0", + "description": "Goose CLI integration with YAML recipe format", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] } } } diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 704b7c61e6..ff6e7c1e19 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -19,6 +19,7 @@ from typing import Any, Dict, List, Optional import yaml +from packaging import version as pkg_version # --------------------------------------------------------------------------- @@ -239,17 +240,20 @@ def _fetch_single_catalog( if not force_refresh and cache_file.exists() and cache_meta.exists(): try: - meta = json.loads(cache_meta.read_text()) + meta = json.loads(cache_meta.read_text(encoding="utf-8")) cached_at = datetime.fromisoformat(meta.get("cached_at", "")) if cached_at.tzinfo is None: cached_at = cached_at.replace(tzinfo=timezone.utc) age = (datetime.now(timezone.utc) - cached_at).total_seconds() if age < self.CACHE_DURATION: - return json.loads(cache_file.read_text()) + return json.loads(cache_file.read_text(encoding="utf-8")) except (json.JSONDecodeError, ValueError, KeyError, TypeError): # Cache is invalid or stale metadata; delete and refetch from source. 
- cache_file.unlink(missing_ok=True) - cache_meta.unlink(missing_ok=True) + try: + cache_file.unlink(missing_ok=True) + cache_meta.unlink(missing_ok=True) + except OSError: + pass try: with urllib.request.urlopen(entry.url, timeout=10) as resp: @@ -273,7 +277,7 @@ def _fetch_single_catalog( try: self.cache_dir.mkdir(parents=True, exist_ok=True) - cache_file.write_text(json.dumps(catalog_data, indent=2)) + cache_file.write_text(json.dumps(catalog_data, indent=2), encoding="utf-8") cache_meta.write_text( json.dumps( { @@ -281,7 +285,8 @@ def _fetch_single_catalog( "catalog_url": entry.url, }, indent=2, - ) + ), + encoding="utf-8", ) except OSError: pass # Cache is best-effort; proceed with fetched data @@ -473,9 +478,11 @@ def _validate(self) -> None: "must be lowercase alphanumeric with hyphens only" ) - if not re.match(r"^\d+\.\d+\.\d+$", integ["version"]): + try: + pkg_version.Version(integ["version"]) + except pkg_version.InvalidVersion: raise IntegrationDescriptorError( - f"Invalid version '{integ['version']}': must use semantic versioning (e.g., 1.0.0)" + f"Invalid version '{integ['version']}'" ) requires = self.data["requires"] diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index 2049e7c7ff..17259681f9 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -118,20 +118,10 @@ def test_empty_config_raises(self, tmp_path): # --------------------------------------------------------------------------- -# IntegrationCatalog — fetch & search (using local file:// catalog) +# IntegrationCatalog — fetch & search (using monkeypatched urlopen responses) # --------------------------------------------------------------------------- -def _write_catalog(path: Path, integrations: dict) -> None: - """Helper: write a catalog JSON file.""" - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text(json.dumps({ - "schema_version": "1.0", - 
"updated_at": "2026-01-01T00:00:00Z", - "integrations": integrations, - }, indent=2)) - - class TestCatalogFetch: """Tests that use a local HTTP server stub via monkeypatch.""" From 3140f5314a524996d250914abd25d307f12ff241 Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 15:11:37 -0500 Subject: [PATCH 08/19] fix: remove unused Path import, add comment to empty except --- src/specify_cli/integrations/catalog.py | 2 +- tests/integrations/test_integration_catalog.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index ff6e7c1e19..08641172d0 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -253,7 +253,7 @@ def _fetch_single_catalog( cache_file.unlink(missing_ok=True) cache_meta.unlink(missing_ok=True) except OSError: - pass + pass # Cache cleanup is best-effort; ignore deletion failures. 
try: with urllib.request.urlopen(entry.url, timeout=10) as resp: diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index 17259681f9..d0ea96397c 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -2,7 +2,6 @@ import json import os -from pathlib import Path import pytest import yaml From 40df71974efc0979ed5aadc510ca72845889284c Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 15:27:10 -0500 Subject: [PATCH 09/19] fix: validate descriptor root is dict, add shared infra to upgrade - Add isinstance(self.data, dict) check at start of _validate() so non-mapping YAML roots raise IntegrationDescriptorError - Run _install_shared_infra() and ensure_executable_scripts() in upgrade command to match install/switch behavior --- src/specify_cli/__init__.py | 6 ++++++ src/specify_cli/integrations/catalog.py | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 30b97a384d..ead165db7f 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2286,6 +2286,12 @@ def integration_upgrade( selected_script = _resolve_script_type(project_root, script) + # Ensure shared infrastructure is present (safe to run unconditionally; + # _install_shared_infra merges missing files without overwriting). 
+ _install_shared_infra(project_root, selected_script) + if os.name != "nt": + ensure_executable_scripts(project_root) + # Phase 1: Install new files (overwrites existing; old-only files remain) console.print(f"Upgrading integration: [cyan]{key}[/cyan]") new_manifest = IntegrationManifest(key, project_root, version=get_speckit_version()) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 08641172d0..2388fe704b 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -449,6 +449,10 @@ def _load(path: Path) -> dict: # -- Validation ------------------------------------------------------- def _validate(self) -> None: + if not isinstance(self.data, dict): + raise IntegrationDescriptorError( + f"Descriptor root must be a YAML mapping, got {type(self.data).__name__}" + ) for field in self.REQUIRED_TOP_LEVEL: if field not in self.data: raise IntegrationDescriptorError( From cefb058bd97a4c07b1dc5357b4128a7e98bbe184 Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 15:50:57 -0500 Subject: [PATCH 10/19] fix: address sixth round of PR review feedback - Validate integration.id/name/version/description are strings - Catch TypeError in pkg_version.Version() for non-string versions - Swap validation order: check catalogs type before emptiness - Isolate TestActiveCatalogs from user ~/.specify/ via monkeypatch --- src/specify_cli/integrations/catalog.py | 16 ++++++++++------ tests/integrations/test_integration_catalog.py | 4 +++- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 2388fe704b..9f3d4d3640 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -111,16 +111,16 @@ def _load_catalog_config( f"Invalid catalog config {config_path}: expected a YAML mapping at the root" ) 
catalogs_data = data.get("catalogs", []) - if not catalogs_data: - raise IntegrationCatalogError( - f"Catalog config {config_path} exists but contains no 'catalogs' entries. " - f"Remove the file to use built-in defaults, or add valid catalog entries." - ) if not isinstance(catalogs_data, list): raise IntegrationCatalogError( f"Invalid catalog config: 'catalogs' must be a list, " f"got {type(catalogs_data).__name__}" ) + if not catalogs_data: + raise IntegrationCatalogError( + f"Catalog config {config_path} exists but contains no 'catalogs' entries. " + f"Remove the file to use built-in defaults, or add valid catalog entries." + ) entries: List[IntegrationCatalogEntry] = [] skipped: List[int] = [] for idx, item in enumerate(catalogs_data): @@ -475,6 +475,10 @@ def _validate(self) -> None: raise IntegrationDescriptorError( f"Missing integration.{field}" ) + if not isinstance(integ[field], str): + raise IntegrationDescriptorError( + f"integration.{field} must be a string, got {type(integ[field]).__name__}" + ) if not re.match(r"^[a-z0-9-]+$", integ["id"]): raise IntegrationDescriptorError( @@ -484,7 +488,7 @@ def _validate(self) -> None: try: pkg_version.Version(integ["version"]) - except pkg_version.InvalidVersion: + except (pkg_version.InvalidVersion, TypeError): raise IntegrationDescriptorError( f"Invalid version '{integ['version']}'" ) diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index d0ea96397c..11717f576d 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -73,7 +73,9 @@ def test_missing_host_rejected(self): class TestActiveCatalogs: - def test_defaults_when_no_config(self, tmp_path): + def test_defaults_when_no_config(self, tmp_path, monkeypatch): + monkeypatch.setenv("HOME", str(tmp_path)) + monkeypatch.setenv("USERPROFILE", str(tmp_path)) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) active = 
cat.get_active_catalogs() From 2a043fb81702e659d02075f5548172b4e1febe31 Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 16:03:50 -0500 Subject: [PATCH 11/19] fix: address seventh round of PR review feedback - Update docs: version field uses PEP 440, not semver - Harden search() against non-string author/name/description fields - Validate requires.speckit_version is a non-empty string - Validate command name/file are non-empty strings, file is safe relative path - Handle stale symlinks in upgrade cleanup - Document catalog configuration stack in README.md --- integrations/CONTRIBUTING.md | 4 ++-- integrations/README.md | 21 +++++++++++++++- src/specify_cli/__init__.py | 2 +- src/specify_cli/integrations/catalog.py | 32 +++++++++++++++++++++---- 4 files changed, 51 insertions(+), 8 deletions(-) diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md index 11dec3d984..fbfcf85b17 100644 --- a/integrations/CONTRIBUTING.md +++ b/integrations/CONTRIBUTING.md @@ -82,8 +82,8 @@ provides: |-------|------| | `schema_version` | Must be `"1.0"` | | `integration.id` | Lowercase alphanumeric + hyphens (`^[a-z0-9-]+$`) | -| `integration.version` | Valid semantic version | -| `requires.speckit_version` | Required field; current validation checks presence only | +| `integration.version` | Valid PEP 440 version (parsed with `packaging.version.Version()`) | +| `requires.speckit_version` | Required field; specify a version constraint such as `>=0.6.0` (current validation checks presence only) | | `provides` | Must include at least one command or script | | `provides.commands[].name` | String identifier | | `provides.commands[].file` | Relative path to template file | diff --git a/integrations/README.md b/integrations/README.md index 5c7c6ea1ed..b755e0416d 100644 --- a/integrations/README.md +++ b/integrations/README.md @@ -12,6 +12,25 @@ Contains integrations that ship with Spec Kit. 
These are maintained by the core Community-contributed integrations. Listed for discovery only — users install from the source repositories. +## Catalog Configuration + +The catalog stack is resolved in this order (first match wins): + +1. **Environment variable** — `SPECKIT_INTEGRATION_CATALOG_URL` overrides all catalogs with a single URL +2. **Project config** — `.specify/integration-catalogs.yml` in the project root +3. **User config** — `~/.specify/integration-catalogs.yml` in the user home directory +4. **Built-in defaults** — `catalog.json` + `catalog.community.json` + +Example `integration-catalogs.yml`: + +```yaml +catalogs: + - url: "https://example.com/my-catalog.json" + name: "my-catalog" + priority: 1 + install_allowed: true +``` + ## CLI Commands ```bash @@ -99,7 +118,7 @@ Both catalog files follow the same JSON schema: |-------|------|----------|-------------| | `id` | string | Yes | Unique ID (lowercase alphanumeric + hyphens) | | `name` | string | Yes | Human-readable display name | -| `version` | string | Yes | Semantic version | +| `version` | string | Yes | PEP 440 version (e.g., `1.0.0`, `1.0.0a1`) | | `description` | string | Yes | One-line description | | `author` | string | No | Author name or organization | | `repository` | string | No | Source repository URL | diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index ead165db7f..2e2257b361 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2335,7 +2335,7 @@ def integration_upgrade( normed.relative_to(resolved_project_root) except (ValueError, OSError): continue - if path.exists() and path.is_file(): + if path.is_symlink() or path.is_file(): try: path.unlink() stale_removed += 1 diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 9f3d4d3640..d18c637232 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -357,7 +357,10 @@ def search( """Search 
catalogs for integrations matching the given filters.""" results: List[Dict[str, Any]] = [] for item in self._get_merged_integrations(): - if author and item.get("author", "").lower() != author.lower(): + author_val = item.get("author", "") + if not isinstance(author_val, str): + author_val = str(author_val) if author_val is not None else "" + if author and author_val.lower() != author.lower(): continue if tag: raw_tags = item.get("tags", []) @@ -367,11 +370,14 @@ def search( if query: raw_tags = item.get("tags", []) tags_list = raw_tags if isinstance(raw_tags, list) else [] + name_val = item.get("name", "") + desc_val = item.get("description", "") + id_val = item.get("id", "") haystack = " ".join( [ - item.get("name", ""), - item.get("description", ""), - item.get("id", ""), + str(name_val) if name_val else "", + str(desc_val) if desc_val else "", + str(id_val) if id_val else "", ] + [t for t in tags_list if isinstance(t, str)] ).lower() @@ -502,6 +508,10 @@ def _validate(self) -> None: raise IntegrationDescriptorError( "Missing requires.speckit_version" ) + if not isinstance(requires["speckit_version"], str) or not requires["speckit_version"].strip(): + raise IntegrationDescriptorError( + "requires.speckit_version must be a non-empty string" + ) provides = self.data["provides"] if not isinstance(provides, dict): @@ -531,6 +541,20 @@ def _validate(self) -> None: raise IntegrationDescriptorError( "Command entry missing 'name' or 'file'" ) + cmd_name = cmd["name"] + cmd_file = cmd["file"] + if not isinstance(cmd_name, str) or not cmd_name.strip(): + raise IntegrationDescriptorError( + "Command entry 'name' must be a non-empty string" + ) + if not isinstance(cmd_file, str) or not cmd_file.strip(): + raise IntegrationDescriptorError( + "Command entry 'file' must be a non-empty string" + ) + if os.path.isabs(cmd_file) or ".." 
in Path(cmd_file).parts: + raise IntegrationDescriptorError( + f"Command entry 'file' must be a relative path without '..': {cmd_file}" + ) # -- Property accessors ----------------------------------------------- From 43f4d8d0bf977617a9c1170d70c453fb81bc473f Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 16:23:39 -0500 Subject: [PATCH 12/19] fix: validate script entries, remove destructive teardown from upgrade rollback MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Validate provides.scripts entries are non-empty strings with safe relative paths - Remove teardown from upgrade rollback since setup overwrites in-place — teardown would delete files that were working before the upgrade --- src/specify_cli/__init__.py | 9 +++------ src/specify_cli/integrations/catalog.py | 9 +++++++++ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 2e2257b361..e816418081 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2312,13 +2312,10 @@ def integration_upgrade( _write_integration_json(project_root, key, selected_script) _update_init_options_for_integration(project_root, integration, script_type=selected_script) except Exception as exc: - try: - integration.teardown(project_root, new_manifest, force=True) - except Exception as teardown_exc: - console.print( - f"[yellow]Warning:[/yellow] Teardown during rollback also failed: {teardown_exc}" - ) + # Don't teardown — setup overwrites in-place, so teardown would + # delete files that were working before the upgrade. Just report. 
console.print(f"[red]Error:[/red] Failed to upgrade integration: {exc}") + console.print("[yellow]The previous integration files may still be in place.[/yellow]") raise typer.Exit(1) # Phase 2: Remove stale files from old manifest that are not in the new one diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index d18c637232..9f5364b8fe 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -555,6 +555,15 @@ def _validate(self) -> None: raise IntegrationDescriptorError( f"Command entry 'file' must be a relative path without '..': {cmd_file}" ) + for script_entry in scripts: + if not isinstance(script_entry, str) or not script_entry.strip(): + raise IntegrationDescriptorError( + "Script entry must be a non-empty string" + ) + if os.path.isabs(script_entry) or ".." in Path(script_entry).parts: + raise IntegrationDescriptorError( + f"Script entry must be a relative path without '..': {script_entry}" + ) # -- Property accessors ----------------------------------------------- From b62156724f6ae6d6b39c4823ffc2314259d1c64e Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 16:37:45 -0500 Subject: [PATCH 13/19] fix: use consistent resolved root for stale-file cleanup paths --- src/specify_cli/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index e816418081..5de66c700f 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2319,17 +2319,17 @@ def integration_upgrade( raise typer.Exit(1) # Phase 2: Remove stale files from old manifest that are not in the new one - resolved_project_root = project_root.resolve() + root = project_root.resolve() old_files = set(old_manifest.files.keys()) new_files = set(new_manifest.files.keys()) stale_files = old_files - new_files stale_removed = 0 for rel in stale_files: - path 
= project_root / rel + path = root / rel # Validate containment to prevent path traversal from tampered manifests try: normed = Path(os.path.normpath(path)) - normed.relative_to(resolved_project_root) + normed.relative_to(root) except (ValueError, OSError): continue if path.is_symlink() or path.is_file(): @@ -2338,7 +2338,7 @@ def integration_upgrade( stale_removed += 1 # Clean up empty parent directories up to project root parent = path.parent - while parent != project_root: + while parent != root: try: parent.rmdir() except OSError: From d636aaa421697a18f3ec2ad32b14a58a7edce4fa Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 16:58:07 -0500 Subject: [PATCH 14/19] fix: validate redirect URL and reject drive-qualified paths - Validate final URL after redirects with _validate_catalog_url() - Reject paths with Path.drive or Path.anchor for Windows safety - Update FakeResponse mocks with geturl() method --- src/specify_cli/integrations/catalog.py | 8 ++++++-- tests/integrations/test_integration_catalog.py | 15 +++++++++++---- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 9f5364b8fe..3a030cdf3b 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -257,6 +257,10 @@ def _fetch_single_catalog( try: with urllib.request.urlopen(entry.url, timeout=10) as resp: + # Validate final URL after redirects + final_url = resp.geturl() + if final_url != entry.url: + self._validate_catalog_url(final_url) catalog_data = json.loads(resp.read()) if not isinstance(catalog_data, dict): @@ -551,7 +555,7 @@ def _validate(self) -> None: raise IntegrationDescriptorError( "Command entry 'file' must be a non-empty string" ) - if os.path.isabs(cmd_file) or ".." in Path(cmd_file).parts: + if os.path.isabs(cmd_file) or ".." 
in Path(cmd_file).parts or Path(cmd_file).drive or Path(cmd_file).anchor: raise IntegrationDescriptorError( f"Command entry 'file' must be a relative path without '..': {cmd_file}" ) @@ -560,7 +564,7 @@ def _validate(self) -> None: raise IntegrationDescriptorError( "Script entry must be a non-empty string" ) - if os.path.isabs(script_entry) or ".." in Path(script_entry).parts: + if os.path.isabs(script_entry) or ".." in Path(script_entry).parts or Path(script_entry).drive or Path(script_entry).anchor: raise IntegrationDescriptorError( f"Script entry must be a relative path without '..': {script_entry}" ) diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index 11717f576d..0e7e93cd32 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -130,12 +130,16 @@ def _patch_urlopen(self, monkeypatch, catalog_data): """Patch urllib.request.urlopen to return *catalog_data*.""" class FakeResponse: - def __init__(self, data): + def __init__(self, data, url=""): self._data = json.dumps(data).encode() + self._url = url def read(self): return self._data + def geturl(self): + return self._url + def __enter__(self): return self @@ -143,7 +147,7 @@ def __exit__(self, *a): pass def fake_urlopen(url, timeout=10): - return FakeResponse(catalog_data) + return FakeResponse(catalog_data, url) import urllib.request monkeypatch.setattr(urllib.request, "urlopen", fake_urlopen) @@ -431,16 +435,19 @@ def test_list_catalog_flag(self, tmp_path, monkeypatch): import urllib.request class FakeResponse: - def __init__(self, data): + def __init__(self, data, url=""): self._data = json.dumps(data).encode() + self._url = url def read(self): return self._data + def geturl(self): + return self._url def __enter__(self): return self def __exit__(self, *a): pass - monkeypatch.setattr(urllib.request, "urlopen", lambda url, timeout=10: FakeResponse(catalog)) + 
monkeypatch.setattr(urllib.request, "urlopen", lambda url, timeout=10: FakeResponse(catalog, url)) old = os.getcwd() try: From d4cbdb1d375229004b3ee5e2579bc0f44ac468b1 Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 17:14:15 -0500 Subject: [PATCH 15/19] fix: fix docstring backticks, assert file modification in upgrade tests --- src/specify_cli/integrations/catalog.py | 2 +- .../integrations/test_integration_catalog.py | 20 +++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 3a030cdf3b..6fdc7d578a 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -169,7 +169,7 @@ def get_active_catalogs(self) -> List[IntegrationCatalogEntry]: Resolution: 1. ``SPECKIT_INTEGRATION_CATALOG_URL`` env var - 2. Project ```.specify/integration-catalogs.yml``` + 2. Project ``.specify/integration-catalogs.yml`` 3. User ``~/.specify/integration-catalogs.yml`` 4. 
Built-in defaults (built-in + community) """ diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index 0e7e93cd32..17e64e1085 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -563,11 +563,11 @@ def test_upgrade_blocks_on_modified_files(self, tmp_path): assert manifest_path.exists(), "Manifest should exist after init" manifest_data = json.loads(manifest_path.read_text()) tracked_files = manifest_data.get("files", {}) - if tracked_files: - first_rel = next(iter(tracked_files)) - target_file = project / first_rel - if target_file.exists(): - target_file.write_text("MODIFIED CONTENT\n") + assert tracked_files, "Manifest should track at least one file" + first_rel = next(iter(tracked_files)) + target_file = project / first_rel + assert target_file.exists(), f"Tracked file {first_rel} should exist" + target_file.write_text("MODIFIED CONTENT\n") old = os.getcwd() try: @@ -588,11 +588,11 @@ def test_upgrade_force_overwrites_modified(self, tmp_path): manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json" manifest_data = json.loads(manifest_path.read_text()) tracked_files = manifest_data.get("files", {}) - if tracked_files: - first_rel = next(iter(tracked_files)) - target_file = project / first_rel - if target_file.exists(): - target_file.write_text("MODIFIED CONTENT\n") + assert tracked_files, "Manifest should track at least one file" + first_rel = next(iter(tracked_files)) + target_file = project / first_rel + assert target_file.exists(), f"Tracked file {first_rel} should exist" + target_file.write_text("MODIFIED CONTENT\n") old = os.getcwd() try: From 88cde3cfd8d380e46e08eb33c4b1b74da43714fd Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 17:25:25 -0500 Subject: [PATCH 16/19] docs: clarify directory naming convention for hyphenated integration keys --- 
integrations/CONTRIBUTING.md | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md
index fbfcf85b17..77a50d4d98 100644
--- a/integrations/CONTRIBUTING.md
+++ b/integrations/CONTRIBUTING.md
@@ -8,10 +8,11 @@ Built-in integrations are maintained by the Spec Kit core team and ship with the

 ### Checklist

-1. **Create the integration subpackage** under `src/specify_cli/integrations/<key>/`
+1. **Create the integration subpackage** under `src/specify_cli/integrations/<dirname>/`
+   — `<dirname>` matches the integration key when it contains no hyphens (e.g., `gemini`), or replaces hyphens with underscores when it does (e.g., key `cursor-agent` → directory `cursor_agent/`, key `kiro-cli` → directory `kiro_cli/`). Python package names cannot use hyphens.
 2. **Implement the integration class** extending `MarkdownIntegration`, `TomlIntegration`, or `SkillsIntegration`
 3. **Register the integration** in `src/specify_cli/integrations/__init__.py`
-4. **Add tests** under `tests/integrations/test_integration_<key>.py`
+4. **Add tests** under `tests/integrations/test_integration_<dirname>.py`
 5. **Add a catalog entry** in `integrations/catalog.json`
 6.
**Update documentation** in `AGENTS.md` and `README.md` From 1ccc3138e2c7cc25ac388ec07129e09020765dec Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 17:38:39 -0500 Subject: [PATCH 17/19] fix: correct key type hint, isolate all catalog tests from env - Fix key parameter type to str | None (defaults to None) - Add HOME/USERPROFILE monkeypatch and clear SPECKIT_INTEGRATION_CATALOG_URL in all TestCatalogFetch tests for full environment isolation --- src/specify_cli/__init__.py | 2 +- tests/integrations/test_integration_catalog.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 5de66c700f..87ce5376bc 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -2221,7 +2221,7 @@ def integration_switch( @integration_app.command("upgrade") def integration_upgrade( - key: str = typer.Argument(None, help="Integration key to upgrade (default: current integration)"), + key: str | None = typer.Argument(None, help="Integration key to upgrade (default: current integration)"), force: bool = typer.Option(False, "--force", help="Force upgrade even if files are modified"), script: str | None = typer.Option(None, "--script", help="Script type: sh or ps (default: from init-options.json or platform default)"), integration_options: str | None = typer.Option(None, "--integration-options", help="Options for the integration"), diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py index 17e64e1085..3d0a14acdc 100644 --- a/tests/integrations/test_integration_catalog.py +++ b/tests/integrations/test_integration_catalog.py @@ -76,6 +76,7 @@ class TestActiveCatalogs: def test_defaults_when_no_config(self, tmp_path, monkeypatch): monkeypatch.setenv("HOME", str(tmp_path)) monkeypatch.setenv("USERPROFILE", str(tmp_path)) + 
monkeypatch.delenv("SPECKIT_INTEGRATION_CATALOG_URL", raising=False) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) active = cat.get_active_catalogs() @@ -153,6 +154,9 @@ def fake_urlopen(url, timeout=10): monkeypatch.setattr(urllib.request, "urlopen", fake_urlopen) def test_fetch_and_search_all(self, tmp_path, monkeypatch): + monkeypatch.setenv("HOME", str(tmp_path)) + monkeypatch.setenv("USERPROFILE", str(tmp_path)) + monkeypatch.delenv("SPECKIT_INTEGRATION_CATALOG_URL", raising=False) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) @@ -178,6 +182,9 @@ def test_fetch_and_search_all(self, tmp_path, monkeypatch): assert "acme-coder" in ids def test_search_by_tag(self, tmp_path, monkeypatch): + monkeypatch.setenv("HOME", str(tmp_path)) + monkeypatch.setenv("USERPROFILE", str(tmp_path)) + monkeypatch.delenv("SPECKIT_INTEGRATION_CATALOG_URL", raising=False) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) @@ -195,6 +202,9 @@ def test_search_by_tag(self, tmp_path, monkeypatch): assert all("cli" in r.get("tags", []) for r in results) def test_search_by_query(self, tmp_path, monkeypatch): + monkeypatch.setenv("HOME", str(tmp_path)) + monkeypatch.setenv("USERPROFILE", str(tmp_path)) + monkeypatch.delenv("SPECKIT_INTEGRATION_CATALOG_URL", raising=False) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) @@ -213,6 +223,9 @@ def test_search_by_query(self, tmp_path, monkeypatch): assert results[0]["id"] == "claude" def test_get_integration_info(self, tmp_path, monkeypatch): + monkeypatch.setenv("HOME", str(tmp_path)) + monkeypatch.setenv("USERPROFILE", str(tmp_path)) + monkeypatch.delenv("SPECKIT_INTEGRATION_CATALOG_URL", raising=False) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) @@ -232,6 +245,9 @@ def test_get_integration_info(self, tmp_path, monkeypatch): assert cat.get_integration_info("nonexistent") is None def test_invalid_catalog_format(self, tmp_path, monkeypatch): + 
monkeypatch.setenv("HOME", str(tmp_path)) + monkeypatch.setenv("USERPROFILE", str(tmp_path)) + monkeypatch.delenv("SPECKIT_INTEGRATION_CATALOG_URL", raising=False) (tmp_path / ".specify").mkdir() cat = IntegrationCatalog(tmp_path) From 90e47f462a0c22c33d7305f519b2bde189f26eab Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 17:58:34 -0500 Subject: [PATCH 18/19] fix: neutralize catalog table title, handle non-dict cache metadata --- src/specify_cli/__init__.py | 2 +- src/specify_cli/integrations/catalog.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 87ce5376bc..d54b0f1abc 100644 --- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -1806,7 +1806,7 @@ def integration_list( console.print("[yellow]No integrations found in catalog.[/yellow]") return - table = Table(title="Integration Catalog (built-in + community)") + table = Table(title="Integration Catalog") table.add_column("ID", style="cyan") table.add_column("Name") table.add_column("Version") diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 6fdc7d578a..66e31646e3 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -247,7 +247,7 @@ def _fetch_single_catalog( age = (datetime.now(timezone.utc) - cached_at).total_seconds() if age < self.CACHE_DURATION: return json.loads(cache_file.read_text(encoding="utf-8")) - except (json.JSONDecodeError, ValueError, KeyError, TypeError): + except (json.JSONDecodeError, ValueError, KeyError, TypeError, AttributeError): # Cache is invalid or stale metadata; delete and refetch from source. 
try: cache_file.unlink(missing_ok=True) From dc44ee71de00f5add553461d19b432c2956acc5b Mon Sep 17 00:00:00 2001 From: Manfred Riem <15701806+mnriem@users.noreply.github.com> Date: Wed, 15 Apr 2026 18:11:27 -0500 Subject: [PATCH 19/19] fix: validate requires.tools entries in descriptor --- src/specify_cli/integrations/catalog.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py index 66e31646e3..4ba8ded004 100644 --- a/src/specify_cli/integrations/catalog.py +++ b/src/specify_cli/integrations/catalog.py @@ -516,6 +516,22 @@ def _validate(self) -> None: raise IntegrationDescriptorError( "requires.speckit_version must be a non-empty string" ) + tools = requires.get("tools") + if tools is not None: + if not isinstance(tools, list): + raise IntegrationDescriptorError( + "requires.tools must be a list" + ) + for tool in tools: + if not isinstance(tool, dict): + raise IntegrationDescriptorError( + "Each requires.tools entry must be a mapping" + ) + tool_name = tool.get("name") + if not isinstance(tool_name, str) or not tool_name.strip(): + raise IntegrationDescriptorError( + "requires.tools entry 'name' must be a non-empty string" + ) provides = self.data["provides"] if not isinstance(provides, dict):