diff --git a/CHANGELOG.md b/CHANGELOG.md index 58d2ed5e..40cd4ff9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,31 @@ All notable changes to this project will be documented in this file. **Important:** Changes need to be documented below this block as this is the header section. Each section should be separated by a horizontal rule. Newer changelog entries need to be added on top of prior ones to keep the history chronological with most recent changes first. + +--- + +## [0.36.0] - 2026-02-21 + +### Added + +- Enhanced `specfact backlog add` interactive flow with multiline capture (`::END::` sentinel), acceptance criteria, priority, story points, parent selection, and description format selection (`markdown` or `classic`). +- New `specfact backlog init-config` command to scaffold `.specfact/backlog-config.yaml` with safe provider defaults. +- Expanded `specfact backlog map-fields` into a multi-provider setup flow (`ado`, `github`) with guided discovery/validation and canonical config persistence under `.specfact/backlog-config.yaml`. +- GitHub backlog create flow now supports native sub-issue parent linking and optional issue-type / ProjectV2 Type assignment using configured GraphQL metadata. +- Centralized retry support for backlog adapter write operations with duplicate-safe behavior for non-idempotent creates/comments. + +### Fixed + +- Azure DevOps interactive sprint/iteration selection now resolves context from `--project-id` so available iterations are discoverable during `backlog add`. +- Azure DevOps parent candidate discovery no longer hides valid parents via implicit current-iteration filtering in hierarchy selection flows. +- GitHub backlog field/type extraction now tolerates non-list labels and dict-shaped `issue_type` payloads (`name`/`title`) for more reliable type inference. 
+ +### Changed + +- Backlog documentation now reflects the current `specfact backlog` command surface and updated `backlog add` behavior in both guide and command reference docs. + --- + ## [0.35.0] - 2026-02-20 ### Added diff --git a/docs/guides/agile-scrum-workflows.md b/docs/guides/agile-scrum-workflows.md index 8863786b..a01f2b62 100644 --- a/docs/guides/agile-scrum-workflows.md +++ b/docs/guides/agile-scrum-workflows.md @@ -10,6 +10,62 @@ This guide explains how to use SpecFact CLI for agile/scrum workflows, including Preferred command paths are `specfact backlog ceremony standup ...` and `specfact backlog ceremony refinement ...`. Legacy `backlog daily`/`backlog refine` remain available for compatibility. +Backlog module command surface: + +- `specfact backlog add` +- `specfact backlog analyze-deps` +- `specfact backlog trace-impact` +- `specfact backlog verify-readiness` +- `specfact backlog diff` +- `specfact backlog sync` +- `specfact backlog promote` +- `specfact backlog generate-release-notes` +- `specfact backlog delta status|impact|cost-estimate|rollback-analysis` + +## Backlog Issue Creation (`backlog add`) + +Use `specfact backlog add` to create a backlog item with optional parent hierarchy validation and DoR checks. + +```bash +# Non-interactive creation +specfact backlog add \ + --adapter github \ + --project-id nold-ai/specfact-cli \ + --template github_projects \ + --type story \ + --parent FEAT-123 \ + --title "Implement X" \ + --body "Acceptance criteria: ..." \ + --non-interactive + +# Enforce Definition of Ready from .specfact/dor.yaml before create +specfact backlog add \ + --adapter github \ + --project-id nold-ai/specfact-cli \ + --type story \ + --title "Implement X" \ + --body "Acceptance criteria: ..." \ + --check-dor \ + --repo-path . 
+ +# Interactive ADO flow with sprint/iteration selection and story-quality fields +specfact backlog add \ + --adapter ado \ + --project-id "dominikusnold/Specfact CLI" +``` + +Key behavior: + +- validates parent exists in current backlog graph before creating +- validates child-parent type compatibility using `creation_hierarchy` from config/template +- supports interactive prompts when required fields are missing (unless `--non-interactive`) +- prompts for ADO sprint/iteration selection and resolves available iterations from `--project-id` context +- supports multiline body and acceptance criteria capture (default sentinel `::END::`) +- captures priority and story points for story-like items +- supports description rendering mode (`markdown` or `classic`) +- auto-selects template by adapter when omitted (`ado_scrum` for ADO, `github_projects` for GitHub) +- creates via adapter protocol (`github` or `ado`) and prints created `id`, `key`, and `url` + ## Overview SpecFact CLI supports real-world agile/scrum practices through: diff --git a/docs/reference/commands.md b/docs/reference/commands.md index 8a4c008f..91327636 100644 --- a/docs/reference/commands.md +++ b/docs/reference/commands.md @@ -3797,14 +3797,14 @@ specfact sync repository --repo . --watch --interval 2 --confidence 0.7 ### `backlog` - Backlog Refinement and Template Management -Backlog refinement commands for AI-assisted template-driven refinement of DevOps backlog items. +Backlog refinement and dependency commands grouped under the `specfact backlog` command family. 
**Command Topology (recommended):** - `specfact backlog ceremony standup ...` - `specfact backlog ceremony refinement ...` - `specfact backlog delta status|impact|cost-estimate|rollback-analysis ...` -- `specfact backlog analyze-deps|trace-impact|sync|verify-readiness|diff|promote|generate-release-notes ...` +- `specfact backlog add|analyze-deps|trace-impact|sync|verify-readiness|diff|promote|generate-release-notes ...` Compatibility commands `specfact backlog daily` and `specfact backlog refine` remain available, but ceremony entrypoints are preferred for discoverability. @@ -3855,6 +3855,33 @@ specfact backlog delta cost-estimate --project-id 1 --adapter github specfact backlog delta rollback-analysis --project-id 1 --adapter github ``` +#### `backlog add` + +Create a backlog item with optional parent hierarchy validation and DoR checks. + +```bash +specfact backlog add --project-id [OPTIONS] +``` + +**Common options:** + +- `--adapter ADAPTER` - Backlog adapter id (default: `github`) +- `--template TEMPLATE` - Mapping template (default is adapter-aware: `github_projects` for GitHub, `ado_scrum` for ADO) +- `--type TYPE` - Child type to create (for example `story`, `task`, `feature`) +- `--parent REF` - Optional parent reference (id/key/title); validated against graph +- `--title TEXT` - Issue title +- `--body TEXT` - Issue description/body +- `--acceptance-criteria TEXT` - Acceptance criteria content (also supported via interactive multiline input) +- `--priority TEXT` - Optional priority value (for example `1`, `high`, `P1`) +- `--story-points VALUE` - Optional story points (integer or float) +- `--sprint TEXT` - Optional sprint/iteration path assignment +- `--body-end-marker TEXT` - Sentinel marker for multiline input (default: `::END::`) +- `--description-format TEXT` - Description rendering mode (`markdown` or `classic`) +- `--non-interactive` - Fail fast on missing required inputs instead of prompting +- `--check-dor` - Validate draft against 
`.specfact/dor.yaml` before create +- `--repo-path PATH` - Repository path used to load DoR configuration (default `.`) +- `--custom-config PATH` - Optional config containing `creation_hierarchy` + #### `backlog analyze-deps` Build and analyze backlog dependency graph for a provider project. @@ -3868,7 +3895,7 @@ specfact backlog analyze-deps --project-id [OPTIONS] **Migration note:** `specfact module` is the canonical lifecycle command group. Init lifecycle flags remain supported as compatibility aliases. - `--adapter ADAPTER` - Backlog adapter id (default: `github`) -- `--template TEMPLATE` - Mapping template (default: `github_projects`) +- `--template TEMPLATE` - Mapping template (default is adapter-aware: `github_projects` for GitHub, `ado_scrum` for ADO) - `--custom-config PATH` - Optional custom mapping YAML - `--output PATH` - Optional markdown summary output - `--json-export PATH` - Optional graph JSON export @@ -3886,7 +3913,7 @@ specfact backlog trace-impact --project-id [OPTIONS] **Migration note:** `specfact module` is the canonical lifecycle command group. Init lifecycle flags remain supported as compatibility aliases. - `--adapter ADAPTER` - Backlog adapter id (default: `github`) -- `--template TEMPLATE` - Mapping template (default: `github_projects`) +- `--template TEMPLATE` - Mapping template (default is adapter-aware: `github_projects` for GitHub, `ado_scrum` for ADO) - `--custom-config PATH` - Optional custom mapping YAML #### `backlog verify-readiness` @@ -3902,7 +3929,7 @@ specfact backlog verify-readiness --project-id [OPTIONS] **Migration note:** `specfact module` is the canonical lifecycle command group. Init lifecycle flags remain supported as compatibility aliases. 
- `--adapter ADAPTER` - Backlog adapter id (default: `github`) -- `--template TEMPLATE` - Mapping template (default: `github_projects`) +- `--template TEMPLATE` - Mapping template (default is adapter-aware: `github_projects` for GitHub, `ado_scrum` for ADO) - `--target-items CSV` - Optional comma-separated subset of item IDs #### `backlog diff` diff --git a/modules/backlog-core/src/backlog_core/adapters/backlog_protocol.py b/modules/backlog-core/src/backlog_core/adapters/backlog_protocol.py index 556d4f37..0b321515 100644 --- a/modules/backlog-core/src/backlog_core/adapters/backlog_protocol.py +++ b/modules/backlog-core/src/backlog_core/adapters/backlog_protocol.py @@ -26,6 +26,13 @@ def fetch_all_issues(self, project_id: str, filters: dict[str, Any] | None = Non def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: """Fetch all issue/work-item relationships for a project.""" + @beartype + @require(lambda project_id: project_id.strip() != "", "project_id must be non-empty") + @require(lambda payload: isinstance(payload, dict), "payload must be dict") + @ensure(lambda result: isinstance(result, dict), "create_issue must return dict") + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + """Create a provider issue/work item and return id/key/url metadata.""" + @beartype @require(lambda adapter: adapter is not None, "adapter must be provided") @@ -35,7 +42,7 @@ def require_backlog_graph_protocol(adapter: Any) -> BacklogGraphProtocol: if not isinstance(adapter, BacklogGraphProtocol): msg = ( f"Adapter '{type(adapter).__name__}' does not support BacklogGraphProtocol. " - "Expected methods: fetch_all_issues(project_id, filters), fetch_relationships(project_id)." + "Expected methods: fetch_all_issues(project_id, filters), fetch_relationships(project_id), create_issue(project_id, payload)." 
) raise TypeError(msg) return adapter diff --git a/modules/backlog-core/src/backlog_core/commands/__init__.py b/modules/backlog-core/src/backlog_core/commands/__init__.py index 8b57b490..054d093f 100644 --- a/modules/backlog-core/src/backlog_core/commands/__init__.py +++ b/modules/backlog-core/src/backlog_core/commands/__init__.py @@ -3,6 +3,7 @@ from specfact_cli.contracts.module_interface import ModuleIOContract from specfact_cli.modules import module_io_shim +from .add import add from .analyze_deps import analyze_deps, trace_impact from .diff import diff from .promote import promote @@ -20,6 +21,7 @@ __all__ = [ "BacklogGraphToPlanBundle", + "add", "analyze_deps", "commands_interface", "compute_delta", diff --git a/modules/backlog-core/src/backlog_core/commands/add.py b/modules/backlog-core/src/backlog_core/commands/add.py new file mode 100644 index 00000000..75e7f5ed --- /dev/null +++ b/modules/backlog-core/src/backlog_core/commands/add.py @@ -0,0 +1,653 @@ +"""Backlog add command.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Annotated, Any + +import requests +import typer +import yaml +from beartype import beartype +from icontract import require + +from backlog_core.adapters.backlog_protocol import require_backlog_graph_protocol +from backlog_core.graph.builder import BacklogGraphBuilder +from backlog_core.graph.config_schema import load_backlog_config_from_backlog_file, load_backlog_config_from_spec +from specfact_cli.adapters.registry import AdapterRegistry +from specfact_cli.models.dor_config import DefinitionOfReady +from specfact_cli.utils.prompts import print_error, print_info, print_success, print_warning, prompt_text + + +DEFAULT_CREATION_HIERARCHY: dict[str, list[str]] = { + "epic": [], + "feature": ["epic"], + "story": ["feature", "epic"], + "task": ["story", "feature"], + "bug": ["story", "feature", "epic"], + "spike": ["feature", "epic"], + "custom": ["epic", "feature", "story"], +} + +STORY_LIKE_TYPES: 
set[str] = {"story", "feature", "task", "bug"} + +DEFAULT_CUSTOM_MAPPING_FILES: dict[str, Path] = { + "ado": Path(".specfact/templates/backlog/field_mappings/ado_custom.yaml"), + "github": Path(".specfact/templates/backlog/field_mappings/github_custom.yaml"), +} + + +@beartype +def _prompt_multiline_text(field_label: str, end_marker: str) -> str: + """Read multiline text until a sentinel marker line is entered.""" + marker = end_marker.strip() or "::END::" + print_info(f"{field_label} (multiline). End input with '{marker}' on a new line.") + lines: list[str] = [] + while True: + try: + line = input() + except EOFError: + break + if line.strip() == marker: + break + lines.append(line) + return "\n".join(lines).strip() + + +@beartype +def _select_with_fallback(message: str, choices: list[str], default: str | None = None) -> str: + """Use questionary select when available, otherwise plain text prompt.""" + normalized = [choice for choice in choices if choice] + if not normalized: + return (default or "").strip() + + try: + import questionary # type: ignore[reportMissingImports] + + selected = questionary.select(message, choices=normalized, default=default).ask() + if isinstance(selected, str) and selected.strip(): + return selected.strip() + except Exception: + pass + + print_info(f"{message}: {', '.join(normalized)}") + fallback_default = default if default in normalized else normalized[0] + return prompt_text(message, default=fallback_default) + + +@beartype +def _interactive_sprint_selection(adapter_name: str, adapter_instance: Any, project_id: str) -> str | None: + """Prompt for sprint/iteration selection (provider-aware).""" + adapter_lower = adapter_name.strip().lower() + + if adapter_lower != "ado": + raw = prompt_text("Sprint/iteration (optional)", default="", required=False).strip() + return raw or None + + current_iteration: str | None = None + list_iterations: list[str] = [] + + restore_org = getattr(adapter_instance, "org", None) + restore_project = 
getattr(adapter_instance, "project", None) + resolver = getattr(adapter_instance, "_resolve_graph_project_context", None) + if callable(resolver): + try: + resolved_org, resolved_project = resolver(project_id) + if hasattr(adapter_instance, "org"): + adapter_instance.org = resolved_org + if hasattr(adapter_instance, "project"): + adapter_instance.project = resolved_project + except Exception: + pass + + get_current = getattr(adapter_instance, "_get_current_iteration", None) + if callable(get_current): + try: + resolved = get_current() + if isinstance(resolved, str) and resolved.strip(): + current_iteration = resolved.strip() + except Exception: + current_iteration = None + + get_list = getattr(adapter_instance, "_list_available_iterations", None) + if callable(get_list): + try: + candidates = get_list() + if isinstance(candidates, list): + list_iterations = [str(item).strip() for item in candidates if str(item).strip()] + except Exception: + list_iterations = [] + + if hasattr(adapter_instance, "org"): + adapter_instance.org = restore_org + if hasattr(adapter_instance, "project"): + adapter_instance.project = restore_project + + options = ["(skip sprint/iteration)"] + if current_iteration: + options.append(f"current: {current_iteration}") + options.extend([iteration for iteration in list_iterations if iteration != current_iteration]) + options.append("manual entry") + + default = f"current: {current_iteration}" if current_iteration else "manual entry" + selected = _select_with_fallback("Select sprint/iteration", options, default=default) + + if selected == "(skip sprint/iteration)": + return None + if selected.startswith("current: "): + return selected.removeprefix("current: ").strip() or None + if selected == "manual entry": + manual = prompt_text("Enter sprint/iteration path", default="", required=False).strip() + return manual or None + return selected.strip() or None + + +@beartype +@require(lambda value: isinstance(value, str), "value must be a string") +def 
_normalize_type(value: str) -> str: + return value.strip().lower().replace("_", " ").replace("-", " ") + + +@beartype +def _resolve_default_template(adapter_name: str, template: str | None) -> str: + if template and template.strip(): + return template.strip() + if adapter_name.strip().lower() == "ado": + return "ado_scrum" + return "github_projects" + + +@beartype +def _extract_item_type(item: Any) -> str: + """Best-effort normalized item type from graph item and raw payload.""" + value = getattr(item, "type", None) + enum_value = getattr(value, "value", None) + if isinstance(enum_value, str) and enum_value.strip(): + return _normalize_type(enum_value) + if isinstance(value, str) and value.strip(): + normalized = _normalize_type(value) + if normalized.startswith("itemtype."): + normalized = normalized.split(".", 1)[1] + if normalized: + return normalized + + inferred = getattr(item, "inferred_type", None) + inferred_value = getattr(inferred, "value", None) + if isinstance(inferred_value, str) and inferred_value.strip(): + return _normalize_type(inferred_value) + + raw_data = getattr(item, "raw_data", {}) + if isinstance(raw_data, dict): + fields = raw_data.get("fields") if isinstance(raw_data.get("fields"), dict) else {} + candidates = [ + raw_data.get("type"), + raw_data.get("work_item_type"), + fields.get("System.WorkItemType") if isinstance(fields, dict) else None, + raw_data.get("issue_type"), + ] + for candidate in candidates: + if isinstance(candidate, str) and candidate.strip(): + normalized = _normalize_type(candidate) + aliases = { + "user story": "story", + "product backlog item": "story", + "pb i": "story", + } + return aliases.get(normalized, normalized) + + return "custom" + + +@beartype +def _load_custom_config(custom_config: Path | None) -> dict[str, Any]: + if custom_config is None: + return {} + if not custom_config.exists(): + raise ValueError(f"Custom config file not found: {custom_config}") + loaded = 
yaml.safe_load(custom_config.read_text(encoding="utf-8")) + return loaded if isinstance(loaded, dict) else {} + + +@beartype +def _resolve_custom_config_path(adapter_name: str, custom_config: Path | None) -> Path | None: + """Resolve custom mapping file path with adapter-specific default fallback.""" + if custom_config is not None: + return custom_config + candidate = DEFAULT_CUSTOM_MAPPING_FILES.get(adapter_name.strip().lower()) + if candidate is not None and candidate.exists(): + return candidate + return None + + +@beartype +def _load_template_config(template: str) -> dict[str, Any]: + module_root = Path(__file__).resolve().parents[1] + template_file = module_root / "resources" / "backlog-templates" / f"{template}.yaml" + shared_template_file = ( + Path(__file__).resolve().parents[5] + / "src" + / "specfact_cli" + / "resources" + / "backlog-templates" + / f"{template}.yaml" + ) + + for candidate in (template_file, shared_template_file): + if candidate.exists(): + loaded = yaml.safe_load(candidate.read_text(encoding="utf-8")) + if isinstance(loaded, dict): + return loaded + return {} + + +@beartype +def _derive_creation_hierarchy(template_payload: dict[str, Any], custom_config: dict[str, Any]) -> dict[str, list[str]]: + custom_hierarchy = custom_config.get("creation_hierarchy") + if isinstance(custom_hierarchy, dict): + return { + _normalize_type(str(child)): [_normalize_type(str(parent)) for parent in parents] + for child, parents in custom_hierarchy.items() + if isinstance(parents, list) + } + + template_hierarchy = template_payload.get("creation_hierarchy") + if isinstance(template_hierarchy, dict): + return { + _normalize_type(str(child)): [_normalize_type(str(parent)) for parent in parents] + for child, parents in template_hierarchy.items() + if isinstance(parents, list) + } + + return DEFAULT_CREATION_HIERARCHY + + +@beartype +def _resolve_provider_fields_for_create( + adapter_name: str, + template_payload: dict[str, Any], + custom_config: dict[str, Any], + 
repo_path: Path, +) -> dict[str, Any] | None: + """Resolve provider-specific create payload fields from template/custom config.""" + if adapter_name.strip().lower() != "github": + return None + + def _extract_github_project_v2(source: dict[str, Any]) -> dict[str, Any]: + provider_fields = source.get("provider_fields") + if isinstance(provider_fields, dict): + candidate = provider_fields.get("github_project_v2") + if isinstance(candidate, dict): + return dict(candidate) + fallback = source.get("github_project_v2") + if isinstance(fallback, dict): + return dict(fallback) + return {} + + def _extract_github_issue_types(source: dict[str, Any]) -> dict[str, Any]: + provider_fields = source.get("provider_fields") + if isinstance(provider_fields, dict): + candidate = provider_fields.get("github_issue_types") + if isinstance(candidate, dict): + return dict(candidate) + fallback = source.get("github_issue_types") + if isinstance(fallback, dict): + return dict(fallback) + return {} + + spec_settings: dict[str, Any] = {} + backlog_cfg = load_backlog_config_from_backlog_file(repo_path / ".specfact" / "backlog-config.yaml") + spec_config = backlog_cfg or load_backlog_config_from_spec(repo_path / ".specfact" / "spec.yaml") + if spec_config is not None: + github_provider = spec_config.providers.get("github") + if github_provider is not None and isinstance(github_provider.settings, dict): + spec_settings = dict(github_provider.settings) + + template_cfg = _extract_github_project_v2(template_payload) + spec_cfg = _extract_github_project_v2(spec_settings) + custom_cfg = _extract_github_project_v2(custom_config) + + template_issue_types = _extract_github_issue_types(template_payload) + spec_issue_types = _extract_github_issue_types(spec_settings) + custom_issue_types = _extract_github_issue_types(custom_config) + + result: dict[str, Any] = {} + + if template_cfg or spec_cfg or custom_cfg: + template_option_ids = template_cfg.get("type_option_ids") + spec_option_ids = 
spec_cfg.get("type_option_ids") + custom_option_ids = custom_cfg.get("type_option_ids") + merged_option_ids: dict[str, Any] = {} + if isinstance(template_option_ids, dict): + merged_option_ids.update(template_option_ids) + if isinstance(spec_option_ids, dict): + merged_option_ids.update(spec_option_ids) + if isinstance(custom_option_ids, dict): + merged_option_ids.update(custom_option_ids) + + merged_cfg = {**template_cfg, **spec_cfg, **custom_cfg} + if merged_option_ids: + merged_cfg["type_option_ids"] = merged_option_ids + if merged_cfg: + result["github_project_v2"] = merged_cfg + + if template_issue_types or spec_issue_types or custom_issue_types: + template_type_ids = template_issue_types.get("type_ids") + spec_type_ids = spec_issue_types.get("type_ids") + custom_type_ids = custom_issue_types.get("type_ids") + merged_type_ids: dict[str, Any] = {} + if isinstance(template_type_ids, dict): + merged_type_ids.update(template_type_ids) + if isinstance(spec_type_ids, dict): + merged_type_ids.update(spec_type_ids) + if isinstance(custom_type_ids, dict): + merged_type_ids.update(custom_type_ids) + + issue_type_cfg = {**template_issue_types, **spec_issue_types, **custom_issue_types} + if merged_type_ids: + issue_type_cfg["type_ids"] = merged_type_ids + if issue_type_cfg: + result["github_issue_types"] = issue_type_cfg + + return result or None + + +@beartype +def _has_github_repo_issue_type_mapping(provider_fields: dict[str, Any] | None, issue_type: str) -> bool: + """Return True when repository GitHub issue-type mapping metadata is available.""" + if not isinstance(provider_fields, dict): + return False + issue_cfg = provider_fields.get("github_issue_types") + if not isinstance(issue_cfg, dict): + return False + type_ids = issue_cfg.get("type_ids") + if not isinstance(type_ids, dict): + return False + mapped = str(type_ids.get(issue_type) or type_ids.get(issue_type.lower()) or "").strip() + return bool(mapped) + + +@beartype +def _resolve_parent_id(parent_ref: str, 
graph_items: dict[str, Any]) -> tuple[str | None, str | None]: + normalized_ref = parent_ref.strip().lower() + + for item_id, item in graph_items.items(): + key = str(getattr(item, "key", "") or "").lower() + title = str(getattr(item, "title", "") or "").lower() + if normalized_ref in {item_id.lower(), key, title}: + return item_id, _extract_item_type(item) + + return None, None + + +@beartype +def _validate_parent(child_type: str, parent_type: str, hierarchy: dict[str, list[str]]) -> bool: + allowed = hierarchy.get(child_type, []) + if not allowed: + return True + return parent_type in allowed + + +@beartype +def _choose_parent_interactively( + issue_type: str, + graph_items: dict[str, Any], + hierarchy: dict[str, list[str]], +) -> str | None: + """Interactively choose parent from existing hierarchy-compatible items.""" + add_parent_choice = _select_with_fallback("Add parent issue?", ["yes", "no"], default="yes") + if add_parent_choice.strip().lower() != "yes": + return None + + allowed = set(hierarchy.get(issue_type, [])) + all_candidates: list[tuple[str, str]] = [] + candidates: list[tuple[str, str]] = [] + for item_id, item in graph_items.items(): + parent_type = _extract_item_type(item) + key = str(getattr(item, "key", item_id) or item_id) + title = str(getattr(item, "title", "") or "") + label = f"{key} | {title} | type={parent_type}" if title else f"{key} | type={parent_type}" + all_candidates.append((label, item_id)) + if allowed and parent_type not in allowed: + continue + candidates.append((label, item_id)) + + if not candidates: + if all_candidates: + print_warning( + "No hierarchy-compatible parent candidates found from inferred types. " + "Showing all issues so you can choose a parent manually." + ) + candidates = all_candidates + else: + print_warning("No hierarchy-compatible parent candidates found. 
Continuing without parent.") + return None + + options = ["(no parent)"] + [label for label, _ in candidates] + default_option = options[1] if len(options) > 1 else options[0] + selected = _select_with_fallback("Select parent issue", options, default=default_option) + if selected == "(no parent)": + return None + + mapping = dict(candidates) + return mapping.get(selected) + + +@beartype +def _parse_story_points(raw_value: str | None) -> int | float | None: + if raw_value is None: + return None + stripped = raw_value.strip() + if not stripped: + return None + try: + if "." in stripped: + return float(stripped) + return int(stripped) + except ValueError: + print_warning(f"Invalid story points '{raw_value}', keeping as text") + return None + + +@beartype +def add( + project_id: Annotated[str, typer.Option("--project-id", help="Backlog project identifier")], + adapter: Annotated[str, typer.Option("--adapter", help="Adapter to use")] = "github", + template: Annotated[str | None, typer.Option("--template", help="Template name for mapping")] = None, + issue_type: Annotated[str | None, typer.Option("--type", help="Issue type (story/task/feature/...)")] = None, + parent: Annotated[str | None, typer.Option("--parent", help="Parent issue id/key/title")] = None, + title: Annotated[str | None, typer.Option("--title", help="Issue title")] = None, + body: Annotated[str | None, typer.Option("--body", help="Issue body/description")] = None, + acceptance_criteria: Annotated[ + str | None, + typer.Option("--acceptance-criteria", help="Acceptance criteria text (recommended for story-like items)"), + ] = None, + priority: Annotated[str | None, typer.Option("--priority", help="Priority value (for example 1, high, P1)")] = None, + story_points: Annotated[ + str | None, typer.Option("--story-points", help="Story points value (integer/float)") + ] = None, + sprint: Annotated[str | None, typer.Option("--sprint", help="Sprint/iteration assignment")] = None, + body_end_marker: Annotated[ + 
str, + typer.Option("--body-end-marker", help="End marker for interactive multiline input"), + ] = "::END::", + description_format: Annotated[ + str, + typer.Option("--description-format", help="Description format: markdown or classic"), + ] = "markdown", + non_interactive: Annotated[bool, typer.Option("--non-interactive", help="Disable prompts")] = False, + check_dor: Annotated[ + bool, typer.Option("--check-dor", help="Validate Definition of Ready before creation") + ] = False, + repo_path: Annotated[Path, typer.Option("--repo-path", help="Repository path for DoR config")] = Path("."), + custom_config: Annotated[ + Path | None, typer.Option("--custom-config", help="Path to custom hierarchy/config YAML") + ] = None, +) -> None: + """Create a backlog item with optional parent hierarchy validation and DoR checks.""" + adapter_instance = AdapterRegistry.get_adapter(adapter) + interactive_mode = not non_interactive + + if non_interactive: + missing = [ + name for name, value in {"type": issue_type, "title": title}.items() if not (value and value.strip()) + ] + if missing: + print_error(f"{', '.join(missing)} required in --non-interactive mode") + raise typer.Exit(code=1) + else: + issue_type_choices = sorted(set(DEFAULT_CREATION_HIERARCHY.keys())) + if not issue_type: + issue_type = _select_with_fallback("Select issue type", issue_type_choices, default="story") + if not title: + title = prompt_text("Issue title") + if body is None: + body = _prompt_multiline_text("Issue body", body_end_marker) + if sprint is None: + sprint = _interactive_sprint_selection(adapter, adapter_instance, project_id) + description_format = _select_with_fallback( + "Select description format", + ["markdown", "classic"], + default=description_format or "markdown", + ).lower() + + normalized_issue_type = _normalize_type(issue_type or "") + if normalized_issue_type in STORY_LIKE_TYPES and acceptance_criteria is None: + capture_ac = _select_with_fallback("Add acceptance criteria?", ["yes", "no"], 
default="yes") + if capture_ac.strip().lower() == "yes": + acceptance_criteria = _prompt_multiline_text("Acceptance criteria", body_end_marker) + + if priority is None: + priority_raw = prompt_text("Priority (optional)", default="", required=False).strip() + priority = priority_raw or None + + if story_points is None and normalized_issue_type in STORY_LIKE_TYPES: + story_points = prompt_text("Story points (optional)", default="", required=False).strip() or None + + assert issue_type is not None + assert title is not None + issue_type = _normalize_type(issue_type) + title = title.strip() + body = (body or "").strip() + acceptance_criteria = (acceptance_criteria or "").strip() or None + priority = (priority or "").strip() or None + + description_format = (description_format or "markdown").strip().lower() + if description_format not in {"markdown", "classic"}: + print_error("description-format must be one of: markdown, classic") + raise typer.Exit(code=1) + + parsed_story_points = _parse_story_points(story_points) + + graph_adapter = require_backlog_graph_protocol(adapter_instance) + + template = _resolve_default_template(adapter, template) + print_info("Input captured. 
Preparing backlog context and validations before create...") + + resolved_custom_config = _resolve_custom_config_path(adapter, custom_config) + custom = _load_custom_config(resolved_custom_config) + template_payload = _load_template_config(template) + + fetch_filters = dict(custom.get("filters") or {}) + if adapter.strip().lower() == "ado": + fetch_filters.setdefault("use_current_iteration_default", False) + items = graph_adapter.fetch_all_issues(project_id, filters=fetch_filters) + relationships = graph_adapter.fetch_relationships(project_id) + + graph = ( + BacklogGraphBuilder( + provider=adapter, + template_name=template, + custom_config={**custom, "project_key": project_id}, + ) + .add_items(items) + .add_dependencies(relationships) + .build() + ) + + hierarchy = _derive_creation_hierarchy(template_payload, custom) + + parent_id: str | None = None + if parent: + parent_id, parent_type = _resolve_parent_id(parent, graph.items) + if not parent_id or not parent_type: + print_error(f"Parent '{parent}' not found") + raise typer.Exit(code=1) + if not _validate_parent(issue_type, parent_type, hierarchy): + allowed = hierarchy.get(issue_type, []) + print_error( + f"Type '{issue_type}' is not allowed under parent type '{parent_type}'. 
" + f"Allowed parent types: {', '.join(allowed) if allowed else '(any)'}" + ) + raise typer.Exit(code=1) + elif interactive_mode: + parent_id = _choose_parent_interactively(issue_type, graph.items, hierarchy) + + payload: dict[str, Any] = { + "type": issue_type, + "title": title, + "description": body, + "description_format": description_format, + } + if acceptance_criteria: + payload["acceptance_criteria"] = acceptance_criteria + if priority: + payload["priority"] = priority + if parsed_story_points is not None: + payload["story_points"] = parsed_story_points + if parent_id: + payload["parent_id"] = parent_id + if sprint: + payload["sprint"] = sprint + + provider_fields = _resolve_provider_fields_for_create(adapter, template_payload, custom, repo_path) + if provider_fields: + payload["provider_fields"] = provider_fields + + if adapter.strip().lower() == "github" and not _has_github_repo_issue_type_mapping(provider_fields, issue_type): + print_warning( + "GitHub repository issue-type mapping is not configured for this issue type; " + "issue type may fall back to labels/body only. Configure " + "backlog_config.providers.github.settings.github_issue_types.type_ids " + "(ProjectV2 mapping is optional) to enable automatic issue Type updates." 
+ ) + + if check_dor: + dor_config = DefinitionOfReady.load_from_repo(repo_path) + if dor_config: + draft = { + "id": "DRAFT", + "title": title, + "body_markdown": body, + "description": body, + "type": issue_type, + "provider_fields": { + "acceptance_criteria": acceptance_criteria, + "priority": priority, + "story_points": parsed_story_points, + }, + } + dor_errors = dor_config.validate_item(draft) + if dor_errors: + print_warning("Definition of Ready (DoR) issues detected:") + for err in dor_errors: + print_warning(err) + raise typer.Exit(code=1) + print_info("Definition of Ready (DoR) satisfied") + + create_context = f"adapter={adapter}, format={description_format}" + if sprint: + create_context += f", sprint={sprint}" + if parent_id: + create_context += ", parent=selected" + print_info(f"Creating backlog item now ({create_context})...") + + try: + created = graph_adapter.create_issue(project_id, payload) + except (requests.Timeout, requests.ConnectionError) as error: + print_warning("Create request failed after send; item may already exist remotely.") + print_warning("Verify backlog for the title/key before retrying to avoid duplicates.") + raise typer.Exit(code=1) from error + print_success("Issue created successfully") + print_info(f"id: {created.get('id', '')}") + print_info(f"key: {created.get('key', '')}") + print_info(f"url: {created.get('url', '')}") diff --git a/modules/backlog-core/src/backlog_core/graph/builder.py b/modules/backlog-core/src/backlog_core/graph/builder.py index dadc8c54..92737c40 100644 --- a/modules/backlog-core/src/backlog_core/graph/builder.py +++ b/modules/backlog-core/src/backlog_core/graph/builder.py @@ -25,6 +25,10 @@ class BacklogConfigModel(BaseModel): description="Raw relationship type -> normalized dependency mapping", ) status_mapping: dict[str, str] = Field(default_factory=dict, description="Raw status -> normalized status mapping") + creation_hierarchy: dict[str, list[str]] = Field( + default_factory=dict, + 
description="Allowed parent types per child type", + ) @beartype @@ -105,6 +109,7 @@ def _flatten_config_payload(self, config_payload: dict[str, Any]) -> dict[str, A "type_mapping": dependency_data.get("type_mapping", {}), "dependency_rules": dependency_data.get("dependency_rules", {}), "status_mapping": dependency_data.get("status_mapping", {}), + "creation_hierarchy": dependency_data.get("creation_hierarchy", {}), "providers": {name: provider.model_dump() for name, provider in schema.providers.items()}, } return BacklogConfigModel.model_validate(config_payload).model_dump() @@ -117,7 +122,7 @@ def _merge_config(self, base: dict[str, Any], override: dict[str, Any]) -> dict[ value = override.get(key) if value is not None: merged[key] = value - for key in ("type_mapping", "dependency_rules", "status_mapping", "providers"): + for key in ("type_mapping", "dependency_rules", "status_mapping", "creation_hierarchy", "providers"): merged[key] = {**merged.get(key, {}), **override.get(key, {})} return merged diff --git a/modules/backlog-core/src/backlog_core/graph/config_schema.py b/modules/backlog-core/src/backlog_core/graph/config_schema.py index 40ab36d5..50a12ea5 100644 --- a/modules/backlog-core/src/backlog_core/graph/config_schema.py +++ b/modules/backlog-core/src/backlog_core/graph/config_schema.py @@ -16,6 +16,10 @@ class DependencyConfig(BaseModel): type_mapping: dict[str, str] = Field(default_factory=dict, description="Raw type -> normalized type mapping") dependency_rules: dict[str, str] = Field(default_factory=dict, description="Raw relation -> normalized mapping") status_mapping: dict[str, str] = Field(default_factory=dict, description="Raw status -> normalized status mapping") + creation_hierarchy: dict[str, list[str]] = Field( + default_factory=dict, + description="Allowed parent types per child type", + ) class ProviderConfig(BaseModel): @@ -42,12 +46,12 @@ class BacklogConfigSchema(BaseModel): devops_stages: dict[str, DevOpsStageConfig] = 
Field(default_factory=dict) -def load_backlog_config_from_spec(spec_path: Path) -> BacklogConfigSchema | None: - """Load backlog config from `.specfact/spec.yaml` if present and valid.""" - if not spec_path.exists(): +def _load_backlog_config_from_yaml(path: Path) -> BacklogConfigSchema | None: + """Load and validate backlog config payload from a YAML file path.""" + if not path.exists(): return None - loaded = yaml.safe_load(spec_path.read_text(encoding="utf-8")) + loaded = yaml.safe_load(path.read_text(encoding="utf-8")) if not isinstance(loaded, dict): return None @@ -61,3 +65,13 @@ def load_backlog_config_from_spec(spec_path: Path) -> BacklogConfigSchema | None payload["devops_stages"] = devops_stages return BacklogConfigSchema.model_validate(payload) + + +def load_backlog_config_from_spec(spec_path: Path) -> BacklogConfigSchema | None: + """Load backlog config from `.specfact/spec.yaml` if present and valid.""" + return _load_backlog_config_from_yaml(spec_path) + + +def load_backlog_config_from_backlog_file(config_path: Path) -> BacklogConfigSchema | None: + """Load backlog config from `.specfact/backlog-config.yaml` if present and valid.""" + return _load_backlog_config_from_yaml(config_path) diff --git a/modules/backlog-core/src/backlog_core/main.py b/modules/backlog-core/src/backlog_core/main.py index 45166885..a7c3f2a1 100644 --- a/modules/backlog-core/src/backlog_core/main.py +++ b/modules/backlog-core/src/backlog_core/main.py @@ -7,6 +7,7 @@ from typer.core import TyperGroup from backlog_core.commands import ( + add, analyze_deps, diff, generate_release_notes, @@ -25,13 +26,14 @@ class _BacklogCoreCommandGroup(TyperGroup): # Command groups first for discoverability. "delta": 10, # High-impact flow commands next. 
- "sync": 20, - "verify-readiness": 30, - "analyze-deps": 40, - "diff": 50, - "promote": 60, - "generate-release-notes": 70, - "trace-impact": 80, + "add": 20, + "sync": 30, + "verify-readiness": 40, + "analyze-deps": 50, + "diff": 60, + "promote": 70, + "generate-release-notes": 80, + "trace-impact": 90, } def list_commands(self, ctx: click.Context) -> list[str]: @@ -44,6 +46,7 @@ def list_commands(self, ctx: click.Context) -> list[str]: help="Backlog dependency analysis and sync", cls=_BacklogCoreCommandGroup, ) +backlog_app.command("add")(add) backlog_app.command("analyze-deps")(analyze_deps) backlog_app.command("trace-impact")(trace_impact) backlog_app.command("sync")(sync) diff --git a/modules/backlog-core/src/backlog_core/resources/backlog-templates/github_custom.yaml b/modules/backlog-core/src/backlog_core/resources/backlog-templates/github_custom.yaml new file mode 100644 index 00000000..a8c99127 --- /dev/null +++ b/modules/backlog-core/src/backlog_core/resources/backlog-templates/github_custom.yaml @@ -0,0 +1,22 @@ +type_mapping: + epic: epic + feature: feature + story: story + task: task + bug: bug +creation_hierarchy: + epic: [] + feature: [epic] + story: [feature, epic] + task: [story, feature] + bug: [story, feature, epic] +dependency_rules: + blocks: blocks + blocked_by: blocks + relates: relates_to +status_mapping: + open: todo + closed: done + todo: todo + in progress: in_progress + done: done diff --git a/modules/backlog-core/tests/unit/test_adapter_create_issue.py b/modules/backlog-core/tests/unit/test_adapter_create_issue.py new file mode 100644 index 00000000..bf048d6e --- /dev/null +++ b/modules/backlog-core/tests/unit/test_adapter_create_issue.py @@ -0,0 +1,334 @@ +"""Unit tests for backlog adapter create_issue contract.""" + +from __future__ import annotations + +import sys +from pathlib import Path + + +# ruff: noqa: E402 + + +REPO_ROOT = Path(__file__).resolve().parents[4] +sys.path.insert(0, str(REPO_ROOT / "modules" / "backlog-core" / 
"src")) +sys.path.insert(0, str(REPO_ROOT / "src")) + +from specfact_cli.adapters.ado import AdoAdapter +from specfact_cli.adapters.github import GitHubAdapter + + +class _DummyResponse: + def __init__(self, payload: dict) -> None: + self._payload = payload + self.status_code = 201 + self.ok = True + self.text = "" + + def raise_for_status(self) -> None: + return None + + def json(self) -> dict: + return self._payload + + +def test_github_create_issue_maps_payload_and_returns_shape(monkeypatch) -> None: + """GitHub create_issue sends issue payload and normalizes response fields.""" + adapter = GitHubAdapter(repo_owner="nold-ai", repo_name="specfact-cli", api_token="token", use_gh_cli=False) + + captured: dict = {} + + def _fake_post(url: str, json: dict, headers: dict, timeout: int): + captured["url"] = url + captured["json"] = json + captured["headers"] = headers + captured["timeout"] = timeout + return _DummyResponse({"id": 77, "number": 42, "html_url": "https://github.com/nold-ai/specfact-cli/issues/42"}) + + import specfact_cli.adapters.github as github_module + + monkeypatch.setattr(github_module.requests, "post", _fake_post) + + retry_call: dict[str, object] = {} + + def _capture_retry(request_callable, **kwargs): + retry_call.update(kwargs) + return request_callable() + + monkeypatch.setattr(adapter, "_request_with_retry", _capture_retry) + + result = adapter.create_issue( + "nold-ai/specfact-cli", + { + "type": "story", + "title": "Implement X", + "description": "Acceptance criteria: ...", + "acceptance_criteria": "Given/When/Then", + "priority": "high", + "story_points": 5, + "parent_id": "100", + }, + ) + + assert retry_call.get("retry_on_ambiguous_transport") is False + assert captured["url"].endswith("/repos/nold-ai/specfact-cli/issues") + assert captured["json"]["title"] == "Implement X" + labels = [label.lower() for label in captured["json"]["labels"]] + assert "story" in labels + assert "priority:high" in labels + assert "story-points:5" in labels + 
assert "acceptance criteria" in captured["json"]["body"].lower() + assert result == {"id": "42", "key": "42", "url": "https://github.com/nold-ai/specfact-cli/issues/42"} + + +def test_ado_create_issue_maps_payload_and_parent_relation(monkeypatch) -> None: + """ADO create_issue sends JSON patch and includes parent relation when provided.""" + adapter = AdoAdapter(org="nold-ai", project="specfact-cli", api_token="token") + + captured: dict = {} + + def _fake_patch(url: str, json: list, headers: dict, timeout: int): + captured["url"] = url + captured["json"] = json + captured["headers"] = headers + captured["timeout"] = timeout + return _DummyResponse( + { + "id": 901, + "url": "https://dev.azure.com/nold-ai/specfact-cli/_apis/wit/workItems/901", + "_links": { + "html": {"href": "https://dev.azure.com/nold-ai/specfact-cli/_workitems/edit/901"}, + }, + } + ) + + import specfact_cli.adapters.ado as ado_module + + monkeypatch.setattr(ado_module.requests, "patch", _fake_patch) + + retry_call: dict[str, object] = {} + + def _capture_retry(request_callable, **kwargs): + retry_call.update(kwargs) + return request_callable() + + monkeypatch.setattr(adapter, "_request_with_retry", _capture_retry) + + result = adapter.create_issue( + "nold-ai/specfact-cli", + { + "type": "story", + "title": "Implement X", + "description": "Acceptance criteria: ...", + "acceptance_criteria": "Given/When/Then", + "priority": 1, + "story_points": 8, + "sprint": "Project\\Release 1\\Sprint 3", + "parent_id": "123", + "description_format": "classic", + }, + ) + + assert retry_call.get("retry_on_ambiguous_transport") is False + assert "/_apis/wit/workitems/$" in captured["url"] + assert any(op.get("path") == "/fields/System.Title" and op.get("value") == "Implement X" for op in captured["json"]) + assert any(op.get("path") == "/relations/-" for op in captured["json"]) + assert any( + op.get("path") == "/multilineFieldsFormat/System.Description" and op.get("value") == "Html" + for op in 
captured["json"] + ) + assert any(op.get("path") == "/fields/Microsoft.VSTS.Common.AcceptanceCriteria" for op in captured["json"]) + assert any( + op.get("path") == "/fields/Microsoft.VSTS.Common.Priority" and op.get("value") == 1 for op in captured["json"] + ) + assert any( + op.get("path") == "/fields/Microsoft.VSTS.Scheduling.StoryPoints" and op.get("value") == 8 + for op in captured["json"] + ) + assert any( + op.get("path") == "/fields/System.IterationPath" and op.get("value") == "Project\\Release 1\\Sprint 3" + for op in captured["json"] + ) + assert result == { + "id": "901", + "key": "901", + "url": "https://dev.azure.com/nold-ai/specfact-cli/_workitems/edit/901", + } + + +def test_github_create_issue_sets_projects_type_field_when_configured(monkeypatch) -> None: + """GitHub create_issue can set ProjectV2 Type field when config is provided.""" + adapter = GitHubAdapter(repo_owner="nold-ai", repo_name="specfact-cli", api_token="token", use_gh_cli=False) + + calls: list[tuple[str, dict]] = [] + + def _fake_post(url: str, json: dict, headers: dict, timeout: int): + _ = headers, timeout + calls.append((url, json)) + if url.endswith("/issues"): + return _DummyResponse( + { + "id": 88, + "number": 55, + "node_id": "ISSUE_NODE_55", + "html_url": "https://github.com/nold-ai/specfact-cli/issues/55", + } + ) + if url.endswith("/graphql"): + query = str(json.get("query") or "") + if "addProjectV2ItemById" in query: + return _DummyResponse({"data": {"addProjectV2ItemById": {"item": {"id": "PVT_ITEM_1"}}}}) + if "updateProjectV2ItemFieldValue" in query: + return _DummyResponse( + {"data": {"updateProjectV2ItemFieldValue": {"projectV2Item": {"id": "PVT_ITEM_1"}}}} + ) + return _DummyResponse({"data": {}}) + raise AssertionError(f"Unexpected URL: {url}") + + import specfact_cli.adapters.github as github_module + + monkeypatch.setattr(github_module.requests, "post", _fake_post) + + result = adapter.create_issue( + "nold-ai/specfact-cli", + { + "type": "story", + "title": 
"Implement projects type", + "description": "Body", + "provider_fields": { + "github_project_v2": { + "project_id": "PVT_PROJECT_1", + "type_field_id": "PVT_FIELD_TYPE", + "type_option_ids": { + "story": "PVT_OPTION_STORY", + }, + } + }, + }, + ) + + graphql_calls = [entry for entry in calls if entry[0].endswith("/graphql")] + assert len(graphql_calls) == 2 + + add_variables = graphql_calls[0][1]["variables"] + assert add_variables == {"projectId": "PVT_PROJECT_1", "contentId": "ISSUE_NODE_55"} + + set_variables = graphql_calls[1][1]["variables"] + assert set_variables["projectId"] == "PVT_PROJECT_1" + assert set_variables["itemId"] == "PVT_ITEM_1" + assert set_variables["fieldId"] == "PVT_FIELD_TYPE" + assert set_variables["optionId"] == "PVT_OPTION_STORY" + + assert result == {"id": "55", "key": "55", "url": "https://github.com/nold-ai/specfact-cli/issues/55"} + + +def test_github_create_issue_sets_repository_issue_type_when_configured(monkeypatch) -> None: + """GitHub create_issue sets repository issue Type when mapping is configured.""" + adapter = GitHubAdapter(repo_owner="nold-ai", repo_name="specfact-cli", api_token="token", use_gh_cli=False) + + calls: list[tuple[str, dict]] = [] + + def _fake_post(url: str, json: dict, headers: dict, timeout: int): + _ = headers, timeout + calls.append((url, json)) + if url.endswith("/issues"): + return _DummyResponse( + { + "id": 188, + "number": 77, + "node_id": "ISSUE_NODE_77", + "html_url": "https://github.com/nold-ai/specfact-cli/issues/77", + } + ) + if url.endswith("/graphql"): + query = str(json.get("query") or "") + if "updateIssue(input:" in query: + return _DummyResponse({"data": {"updateIssue": {"issue": {"id": "ISSUE_NODE_77"}}}}) + return _DummyResponse({"data": {}}) + raise AssertionError(f"Unexpected URL: {url}") + + import specfact_cli.adapters.github as github_module + + monkeypatch.setattr(github_module.requests, "post", _fake_post) + + result = adapter.create_issue( + "nold-ai/specfact-cli", + { + 
"type": "task", + "title": "Apply issue type", + "description": "Body", + "provider_fields": { + "github_issue_types": { + "type_ids": { + "task": "IT_kwDODWwjB84Brk47", + } + } + }, + }, + ) + + graphql_calls = [entry for entry in calls if entry[0].endswith("/graphql")] + assert len(graphql_calls) == 1 + variables = graphql_calls[0][1]["variables"] + assert variables == {"issueId": "ISSUE_NODE_77", "issueTypeId": "IT_kwDODWwjB84Brk47"} + assert result == {"id": "77", "key": "77", "url": "https://github.com/nold-ai/specfact-cli/issues/77"} + + +def test_github_create_issue_links_native_parent_subissue(monkeypatch) -> None: + """GitHub create_issue links parent relationship via native sidebar sub-issue mutation.""" + adapter = GitHubAdapter(repo_owner="nold-ai", repo_name="specfact-cli", api_token="token", use_gh_cli=False) + + calls: list[tuple[str, dict]] = [] + + def _fake_post(url: str, json: dict, headers: dict, timeout: int): + _ = headers, timeout + calls.append((url, json)) + if url.endswith("/issues"): + return _DummyResponse( + { + "id": 288, + "number": 99, + "node_id": "ISSUE_NODE_99", + "html_url": "https://github.com/nold-ai/specfact-cli/issues/99", + } + ) + if url.endswith("/graphql"): + query = str(json.get("query") or "") + if "repository(owner:$owner, name:$repo)" in query and "issue(number:$number)" in query: + return _DummyResponse({"data": {"repository": {"issue": {"id": "ISSUE_NODE_PARENT_11"}}}}) + if "addSubIssue(input:" in query: + return _DummyResponse( + { + "data": { + "addSubIssue": { + "issue": {"id": "ISSUE_NODE_PARENT_11"}, + "subIssue": {"id": "ISSUE_NODE_99"}, + } + } + } + ) + return _DummyResponse({"data": {}}) + raise AssertionError(f"Unexpected URL: {url}") + + import specfact_cli.adapters.github as github_module + + monkeypatch.setattr(github_module.requests, "post", _fake_post) + + result = adapter.create_issue( + "nold-ai/specfact-cli", + { + "type": "task", + "title": "Link native parent", + "description": "Body", + 
"parent_id": "11", + }, + ) + + graphql_calls = [entry for entry in calls if entry[0].endswith("/graphql")] + assert len(graphql_calls) == 2 + + lookup_variables = graphql_calls[0][1]["variables"] + assert lookup_variables == {"owner": "nold-ai", "repo": "specfact-cli", "number": 11} + + link_variables = graphql_calls[1][1]["variables"] + assert link_variables == {"parentIssueId": "ISSUE_NODE_PARENT_11", "subIssueId": "ISSUE_NODE_99"} + assert result == {"id": "99", "key": "99", "url": "https://github.com/nold-ai/specfact-cli/issues/99"} diff --git a/modules/backlog-core/tests/unit/test_add_command.py b/modules/backlog-core/tests/unit/test_add_command.py new file mode 100644 index 00000000..fc7e660b --- /dev/null +++ b/modules/backlog-core/tests/unit/test_add_command.py @@ -0,0 +1,800 @@ +"""Unit tests for backlog add interactive issue creation command.""" + +from __future__ import annotations + +import sys +from pathlib import Path + +from typer.testing import CliRunner + + +# ruff: noqa: E402 + + +REPO_ROOT = Path(__file__).resolve().parents[4] +sys.path.insert(0, str(REPO_ROOT / "modules" / "backlog-core" / "src")) +sys.path.insert(0, str(REPO_ROOT / "src")) + +from backlog_core.main import backlog_app + + +runner = CliRunner() + + +class _FakeAdapter: + def __init__(self, items: list[dict], relationships: list[dict], created: list[dict]) -> None: + self._items = items + self._relationships = relationships + self.created = created + + def fetch_all_issues(self, project_id: str, filters: dict | None = None) -> list[dict]: + _ = project_id, filters + return self._items + + def fetch_relationships(self, project_id: str) -> list[dict]: + _ = project_id + return self._relationships + + def create_issue(self, project_id: str, payload: dict) -> dict: + _ = project_id + self.created.append(payload) + return {"id": "123", "key": "123", "url": "https://example.test/issues/123"} + + +def test_backlog_add_non_interactive_requires_type_and_title(monkeypatch) -> None: + 
"""Non-interactive add fails when required options are missing.""" + from specfact_cli.adapters.registry import AdapterRegistry + + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: _FakeAdapter([], [], [])) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--non-interactive", + ], + ) + + assert result.exit_code == 1 + assert "required in --non-interactive mode" in result.stdout + + +def test_backlog_add_validates_missing_parent(monkeypatch) -> None: + """Add fails when provided parent key/id cannot be resolved.""" + from specfact_cli.adapters.registry import AdapterRegistry + + adapter = _FakeAdapter(items=[], relationships=[], created=[]) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--type", + "story", + "--parent", + "FEAT-123", + "--title", + "Implement X", + "--non-interactive", + ], + ) + + assert result.exit_code == 1 + assert "Parent 'FEAT-123' not found" in result.stdout + + +def test_backlog_add_uses_default_hierarchy_when_no_github_custom_mapping_file(monkeypatch, tmp_path: Path) -> None: + """Add falls back to default hierarchy when github_custom mapping file is absent.""" + from specfact_cli.adapters.registry import AdapterRegistry + + items = [ + { + "id": "42", + "key": "STORY-1", + "title": "Story Parent", + "type": "story", + "status": "todo", + } + ] + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=items, relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + monkeypatch.chdir(tmp_path) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--type", + "task", + "--parent", + "STORY-1", + "--title", + "Implement X", + "--body", 
+ "Body", + "--non-interactive", + ], + ) + + assert result.exit_code == 0 + assert created_payloads + + +def test_backlog_add_auto_applies_github_custom_mapping_file(monkeypatch, tmp_path: Path) -> None: + """Add automatically loads .specfact github_custom mapping file when present.""" + from specfact_cli.adapters.registry import AdapterRegistry + + custom_mapping_file = tmp_path / ".specfact" / "templates" / "backlog" / "field_mappings" / "github_custom.yaml" + custom_mapping_file.parent.mkdir(parents=True, exist_ok=True) + custom_mapping_file.write_text( + """ +creation_hierarchy: + task: [epic] +""".strip(), + encoding="utf-8", + ) + + items = [ + { + "id": "42", + "key": "STORY-1", + "title": "Story Parent", + "type": "story", + "status": "todo", + } + ] + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=items, relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + monkeypatch.chdir(tmp_path) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--type", + "task", + "--parent", + "STORY-1", + "--title", + "Implement X", + "--body", + "Body", + "--non-interactive", + ], + ) + + assert result.exit_code == 1 + assert "Type 'task' is not allowed under parent type 'story'" in result.stdout + + +def test_backlog_add_honors_creation_hierarchy_from_custom_config(monkeypatch, tmp_path: Path) -> None: + """Add validates child->parent relationship using explicit hierarchy config.""" + from specfact_cli.adapters.registry import AdapterRegistry + + config_file = tmp_path / "custom.yaml" + config_file.write_text( + """ +creation_hierarchy: + story: [feature] +""".strip(), + encoding="utf-8", + ) + + items = [ + { + "id": "42", + "key": "FEAT-123", + "title": "Parent", + "type": "feature", + "status": "todo", + } + ] + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=items, relationships=[], 
created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--template", + "github_projects", + "--custom-config", + str(config_file), + "--type", + "story", + "--parent", + "FEAT-123", + "--title", + "Implement X", + "--body", + "Acceptance criteria: ...", + "--non-interactive", + ], + ) + + assert result.exit_code == 0 + assert created_payloads and created_payloads[0]["parent_id"] == "42" + + +def test_backlog_add_check_dor_blocks_invalid_draft(monkeypatch, tmp_path: Path) -> None: + """Add fails DoR check when configured required fields are missing.""" + from specfact_cli.adapters.registry import AdapterRegistry + + dor_dir = tmp_path / ".specfact" + dor_dir.mkdir(parents=True, exist_ok=True) + (dor_dir / "dor.yaml").write_text( + """ +rules: + acceptance_criteria: true +""".strip(), + encoding="utf-8", + ) + + adapter = _FakeAdapter(items=[], relationships=[], created=[]) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--type", + "story", + "--title", + "Implement X", + "--body", + "No explicit section", + "--non-interactive", + "--check-dor", + "--repo-path", + str(tmp_path), + ], + ) + + assert result.exit_code == 1 + assert "Definition of Ready" in result.stdout + + +class _FakeAdoAdapter(_FakeAdapter): + def _get_current_iteration(self) -> str | None: + return "Project\\Sprint 42" + + def _list_available_iterations(self) -> list[str]: + return ["Project\\Sprint 41", "Project\\Sprint 42"] + + +def test_backlog_add_interactive_multiline_body_uses_end_marker(monkeypatch) -> None: + """Interactive add supports multiline body input terminated by marker.""" + from specfact_cli.adapters.registry import AdapterRegistry + + 
created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=[], relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + import importlib + + add_module = importlib.import_module("backlog_core.commands.add") + + def _select(message: str, _choices: list[str], default: str | None = None) -> str: + lowered = message.lower() + if "issue type" in lowered: + return "story" + if "description format" in lowered: + return "markdown" + if "acceptance criteria" in lowered: + return "no" + if "add parent issue" in lowered: + return "no" + return default or "markdown" + + monkeypatch.setattr(add_module, "_select_with_fallback", _select) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--body-end-marker", + "::END::", + ], + input="Interactive title\nline one\nline two\n::END::\n\n\n\n", + ) + + assert result.exit_code == 0 + assert created_payloads + assert created_payloads[0]["description"] == "line one\nline two" + + +def test_backlog_add_interactive_ado_selects_current_iteration(monkeypatch) -> None: + """Interactive add can set sprint from current ADO iteration selection.""" + import importlib + + add_module = importlib.import_module("backlog_core.commands.add") + + from specfact_cli.adapters.registry import AdapterRegistry + + created_payloads: list[dict] = [] + adapter = _FakeAdoAdapter(items=[], relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + def _select(message: str, _choices: list[str], default: str | None = None) -> str: + lowered = message.lower() + if "issue type" in lowered: + return "story" + if "sprint/iteration" in lowered: + return "current: Project\\Sprint 42" + if "description format" in lowered: + return "markdown" + if "acceptance criteria" in lowered: + return "no" + if "add parent issue" in lowered: + return "no" + return 
"markdown" + + monkeypatch.setattr(add_module, "_select_with_fallback", _select) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "dominikusnold/Specfact CLI", + "--adapter", + "ado", + ], + input="ADO story\nbody line\n::END::\n\n\n", + ) + + assert result.exit_code == 0 + assert created_payloads + assert created_payloads[0]["sprint"] == "Project\\Sprint 42" + + +def test_backlog_add_interactive_collects_story_fields_and_parent(monkeypatch) -> None: + """Interactive story flow captures AC/priority/story points and selected parent.""" + import importlib + + add_module = importlib.import_module("backlog_core.commands.add") + + from specfact_cli.adapters.registry import AdapterRegistry + + items = [ + { + "id": "42", + "key": "FEAT-123", + "title": "Parent feature", + "type": "feature", + "status": "todo", + } + ] + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=items, relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + def _select(message: str, _choices: list[str], default: str | None = None) -> str: + lowered = message.lower() + if "issue type" in lowered: + return "story" + if "description format" in lowered: + return "markdown" + if "acceptance criteria" in lowered: + return "yes" + if "add parent issue" in lowered: + return "yes" + if "select parent issue" in lowered: + return "FEAT-123 | Parent feature | type=feature" + return "markdown" + + monkeypatch.setattr(add_module, "_select_with_fallback", _select) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + ], + input="Story title\nbody line\n::END::\n\nac line\n::END::\nhigh\n5\n", + ) + + assert result.exit_code == 0 + assert created_payloads + payload = created_payloads[0] + assert payload["acceptance_criteria"] == "ac line" + assert payload["priority"] == "high" + assert payload["story_points"] == 5 + assert 
payload["parent_id"] == "42" + + +def test_backlog_add_interactive_parent_selection_falls_back_to_all_candidates(monkeypatch) -> None: + """Interactive parent picker falls back to all candidates when type inference yields no matches.""" + import importlib + + add_module = importlib.import_module("backlog_core.commands.add") + + from specfact_cli.adapters.registry import AdapterRegistry + + items = [ + { + "id": "42", + "key": "STORY-1", + "title": "Parent", + "type": "custom", + "status": "todo", + } + ] + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=items, relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + def _select(message: str, _choices: list[str], default: str | None = None) -> str: + lowered = message.lower() + if "issue type" in lowered: + return "task" + if "description format" in lowered: + return "markdown" + if "acceptance criteria" in lowered: + return "no" + if "add parent issue" in lowered: + return "yes" + if "select parent issue" in lowered: + return "STORY-1 | Parent | type=custom" + return default or "markdown" + + monkeypatch.setattr(add_module, "_select_with_fallback", _select) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + ], + input="Task title\nBody line\n::END::\n\n\n\n", + ) + + assert result.exit_code == 0 + assert "No hierarchy-compatible parent candidates found from inferred types." 
in result.stdout + assert created_payloads + assert created_payloads[0]["parent_id"] == "42" + + +def test_backlog_add_ado_default_template_enables_epic_parent_candidates(monkeypatch) -> None: + """ADO add without explicit template should still resolve epic parent candidate for feature.""" + import importlib + + add_module = importlib.import_module("backlog_core.commands.add") + + from specfact_cli.adapters.registry import AdapterRegistry + + items = [ + { + "id": "900", + "key": "EPIC-900", + "title": "Platform Epic", + "work_item_type": "Epic", + "status": "New", + } + ] + created_payloads: list[dict] = [] + adapter = _FakeAdoAdapter(items=items, relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + def _select(message: str, _choices: list[str], default: str | None = None) -> str: + lowered = message.lower() + if "issue type" in lowered: + return "feature" + if "sprint/iteration" in lowered: + return "(skip sprint/iteration)" + if "description format" in lowered: + return "markdown" + if "acceptance criteria" in lowered: + return "no" + if "add parent issue" in lowered: + return "yes" + if "select parent issue" in lowered: + return "EPIC-900 | Platform Epic | type=epic" + return default or "markdown" + + monkeypatch.setattr(add_module, "_select_with_fallback", _select) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "dominikusnold/Specfact CLI", + "--adapter", + "ado", + ], + input="Feature title\nFeature body\n::END::\n\n\n", + ) + + assert result.exit_code == 0 + assert created_payloads + assert created_payloads[0].get("parent_id") == "900" + + +def test_backlog_add_warns_on_ambiguous_create_failure(monkeypatch) -> None: + """CLI warns user when duplicate-safe create fails with ambiguous transport error.""" + import requests + + from specfact_cli.adapters.registry import AdapterRegistry + + class _TimeoutAdapter(_FakeAdapter): + def create_issue(self, 
project_id: str, payload: dict) -> dict: # type: ignore[override] + _ = project_id, payload + raise requests.Timeout("network timeout") + + adapter = _TimeoutAdapter(items=[], relationships=[], created=[]) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--type", + "story", + "--title", + "Implement X", + "--non-interactive", + ], + ) + + assert result.exit_code == 1 + assert "may already exist remotely" in result.stdout + assert "before retrying to avoid duplicates" in result.stdout + + +def test_backlog_add_interactive_ado_sprint_lookup_uses_project_context(monkeypatch) -> None: + """ADO sprint lookup uses project_id-resolved org/project context before selection.""" + import importlib + + add_module = importlib.import_module("backlog_core.commands.add") + + from specfact_cli.adapters.registry import AdapterRegistry + + class _ContextAdoAdapter(_FakeAdapter): + def __init__(self) -> None: + super().__init__(items=[], relationships=[], created=[]) + self.org = None + self.project = None + + def _resolve_graph_project_context(self, project_id: str) -> tuple[str, str]: + assert project_id == "dominikusnold/Specfact CLI" + return "dominikusnold", "Specfact CLI" + + def _get_current_iteration(self) -> str | None: + if self.org == "dominikusnold" and self.project == "Specfact CLI": + return r"Specfact CLI\2026\Sprint 01" + return None + + def _list_available_iterations(self) -> list[str]: + if self.org == "dominikusnold" and self.project == "Specfact CLI": + return [r"Specfact CLI\2026\Sprint 01", r"Specfact CLI\2026\Sprint 02"] + return [] + + adapter = _ContextAdoAdapter() + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + def _select(message: str, _choices: list[str], default: str | None = None) -> str: + lowered = message.lower() + if "issue type" in lowered: + return "story" 
+ if "sprint/iteration" in lowered: + return r"current: Specfact CLI\2026\Sprint 01" + if "description format" in lowered: + return "markdown" + if "acceptance criteria" in lowered: + return "no" + if "add parent issue" in lowered: + return "no" + return default or "markdown" + + monkeypatch.setattr(add_module, "_select_with_fallback", _select) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "dominikusnold/Specfact CLI", + "--adapter", + "ado", + ], + input="Story title\nBody\n::END::\n\n\n", + ) + + assert result.exit_code == 0 + assert adapter.created + assert adapter.created[0].get("sprint") == r"Specfact CLI\2026\Sprint 01" + + +def test_backlog_add_forwards_github_project_v2_provider_fields(monkeypatch, tmp_path: Path) -> None: + """backlog add forwards GitHub ProjectV2 config from custom config into create payload.""" + from specfact_cli.adapters.registry import AdapterRegistry + + config_file = tmp_path / "custom.yaml" + config_file.write_text( + """ +provider_fields: + github_project_v2: + project_id: PVT_PROJECT_1 + type_field_id: PVT_FIELD_TYPE + type_option_ids: + story: PVT_OPTION_STORY +""".strip(), + encoding="utf-8", + ) + + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=[], relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-cli", + "--adapter", + "github", + "--template", + "github_projects", + "--custom-config", + str(config_file), + "--type", + "story", + "--title", + "Implement X", + "--body", + "Acceptance criteria", + "--non-interactive", + ], + ) + + assert result.exit_code == 0 + assert created_payloads + provider_fields = created_payloads[0].get("provider_fields") + assert isinstance(provider_fields, dict) + github_project_v2 = provider_fields.get("github_project_v2") + assert isinstance(github_project_v2, dict) + assert 
github_project_v2.get("project_id") == "PVT_PROJECT_1" + assert github_project_v2.get("type_field_id") == "PVT_FIELD_TYPE" + assert github_project_v2.get("type_option_ids", {}).get("story") == "PVT_OPTION_STORY" + + +def test_backlog_add_forwards_github_project_v2_from_backlog_config(monkeypatch, tmp_path: Path) -> None: + """backlog add loads GitHub ProjectV2 config from .specfact/backlog-config.yaml provider settings.""" + from specfact_cli.adapters.registry import AdapterRegistry + + spec_dir = tmp_path / ".specfact" + spec_dir.mkdir(parents=True, exist_ok=True) + (spec_dir / "backlog-config.yaml").write_text( + """ +backlog_config: + providers: + github: + adapter: github + project_id: nold-ai/specfact-demo-repo + settings: + provider_fields: + github_project_v2: + project_id: PVT_PROJECT_SPEC + type_field_id: PVT_FIELD_TYPE_SPEC + type_option_ids: + task: PVT_OPTION_TASK_SPEC + github_issue_types: + type_ids: + task: IT_TASK_SPEC +""".strip(), + encoding="utf-8", + ) + + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=[], relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-demo-repo", + "--adapter", + "github", + "--template", + "github_projects", + "--type", + "task", + "--title", + "Implement task", + "--body", + "Body", + "--non-interactive", + "--repo-path", + str(tmp_path), + ], + ) + + assert result.exit_code == 0 + assert created_payloads + provider_fields = created_payloads[0].get("provider_fields") + assert isinstance(provider_fields, dict) + github_project_v2 = provider_fields.get("github_project_v2") + assert isinstance(github_project_v2, dict) + assert github_project_v2.get("project_id") == "PVT_PROJECT_SPEC" + assert github_project_v2.get("type_field_id") == "PVT_FIELD_TYPE_SPEC" + assert github_project_v2.get("type_option_ids", {}).get("task") == 
"PVT_OPTION_TASK_SPEC" + github_issue_types = provider_fields.get("github_issue_types") + assert isinstance(github_issue_types, dict) + assert github_issue_types.get("type_ids", {}).get("task") == "IT_TASK_SPEC" + assert "repository issue-type mapping is not configured" not in result.stdout + + +def test_backlog_add_warns_when_github_issue_type_mapping_missing(monkeypatch) -> None: + """backlog add warns when repository issue-type mapping is unavailable for selected type.""" + from specfact_cli.adapters.registry import AdapterRegistry + + created_payloads: list[dict] = [] + adapter = _FakeAdapter(items=[], relationships=[], created=created_payloads) + monkeypatch.setattr(AdapterRegistry, "get_adapter", lambda _adapter: adapter) + + result = runner.invoke( + backlog_app, + [ + "add", + "--project-id", + "nold-ai/specfact-demo-repo", + "--adapter", + "github", + "--type", + "spike", + "--title", + "Sample task", + "--body", + "Body", + "--non-interactive", + ], + ) + + assert result.exit_code == 0 + assert "repository issue-type mapping is not configured" in result.stdout diff --git a/modules/backlog-core/tests/unit/test_backlog_protocol.py b/modules/backlog-core/tests/unit/test_backlog_protocol.py index 7fe06733..bd5db3c7 100644 --- a/modules/backlog-core/tests/unit/test_backlog_protocol.py +++ b/modules/backlog-core/tests/unit/test_backlog_protocol.py @@ -32,6 +32,10 @@ def fetch_relationships(self, project_id: str) -> list[dict]: _ = project_id return [] + def create_issue(self, project_id: str, payload: dict) -> dict: + _ = project_id, payload + return {"id": "1", "key": "1", "url": "https://example.test/1"} + class _InvalidAdapter: def fetch_all_issues(self, project_id: str, filters: dict | None = None) -> list[dict]: diff --git a/modules/bundle-mapper/src/bundle_mapper/__init__.py b/modules/bundle-mapper/src/bundle_mapper/__init__.py index d23cba4a..94555d77 100644 --- a/modules/bundle-mapper/src/bundle_mapper/__init__.py +++ 
b/modules/bundle-mapper/src/bundle_mapper/__init__.py @@ -1,7 +1,8 @@ """Bundle mapper module: confidence-based spec-to-bundle assignment with interactive review.""" -from bundle_mapper.mapper.engine import BundleMapper -from bundle_mapper.models.bundle_mapping import BundleMapping +from .commands import commands_interface +from .mapper.engine import BundleMapper +from .models.bundle_mapping import BundleMapping -__all__ = ["BundleMapper", "BundleMapping"] +__all__ = ["BundleMapper", "BundleMapping", "commands_interface"] diff --git a/modules/bundle-mapper/src/bundle_mapper/commands/__init__.py b/modules/bundle-mapper/src/bundle_mapper/commands/__init__.py index 191c5148..c1979e60 100644 --- a/modules/bundle-mapper/src/bundle_mapper/commands/__init__.py +++ b/modules/bundle-mapper/src/bundle_mapper/commands/__init__.py @@ -1 +1,20 @@ -"""Command hooks for backlog refine/import --auto-bundle (used when module is loaded).""" +"""Command hooks and ModuleIOContract exports for bundle-mapper.""" + +from specfact_cli.contracts.module_interface import ModuleIOContract +from specfact_cli.modules import module_io_shim + + +_MODULE_IO_CONTRACT = ModuleIOContract +import_to_bundle = module_io_shim.import_to_bundle +export_from_bundle = module_io_shim.export_from_bundle +sync_with_bundle = module_io_shim.sync_with_bundle +validate_bundle = module_io_shim.validate_bundle +commands_interface = module_io_shim + +__all__ = [ + "commands_interface", + "export_from_bundle", + "import_to_bundle", + "sync_with_bundle", + "validate_bundle", +] diff --git a/openspec/changes/backlog-core-02-interactive-issue-creation/CHANGE_VALIDATION.md b/openspec/changes/backlog-core-02-interactive-issue-creation/CHANGE_VALIDATION.md index 57590e37..d32a69e7 100644 --- a/openspec/changes/backlog-core-02-interactive-issue-creation/CHANGE_VALIDATION.md +++ b/openspec/changes/backlog-core-02-interactive-issue-creation/CHANGE_VALIDATION.md @@ -1,92 +1,91 @@ # Change Validation Report: 
backlog-core-02-interactive-issue-creation -**Validation Date**: 2026-01-31T00:32:54+01:00 +**Validation Date**: 2026-02-21 01:57:48 +0100 **Change Proposal**: [proposal.md](./proposal.md) -**Validation Method**: Dry-run simulation and format/OpenSpec compliance check +**Validation Method**: Dry-run simulation in temporary workspace + dependency scan ## Executive Summary -- **Breaking Changes**: 1 interface extension (new abstract method on BacklogAdapterMixin); all concrete backlog adapters must implement it. -- **Dependent Files**: 2 affected (GitHubAdapter, AdoAdapter); no existing callers of create_issue. -- **Impact Level**: Low -- **Validation Result**: Pass -- **User Decision**: N/A (no breaking-change options required) +- Breaking Changes: 0 detected / 0 resolved +- Dependent Files: 6 affected +- Impact Level: Medium +- Validation Result: Pass +- User Decision: Proceed with implementation in current scope ## Breaking Changes Detected -### Interface: BacklogAdapterMixin.create_issue - -- **Type**: New abstract method -- **Old Signature**: (none; method does not exist) -- **New Signature**: `create_issue(project_id: str, payload: dict) -> dict` -- **Breaking**: Yes for implementors (any class inheriting BacklogAdapterMixin must implement the new method) -- **Dependent Files**: - - `src/specfact_cli/adapters/github.py`: Must implement create_issue - - `src/specfact_cli/adapters/ado.py`: Must implement create_issue - -**Mitigation**: Change scope already includes implementing create_issue in both GitHub and ADO adapters; no external dependents of BacklogAdapterMixin exist outside this repo. No scope extension needed. +No breaking API/interface changes were detected from the proposed delta: +- `load_backlog_config_from_backlog_file()` is additive. +- Existing `load_backlog_config_from_spec()` remains available for compatibility fallback. +- `backlog map-fields` CLI enhancements are backward compatible for existing ADO usage. 
## Dependencies Affected ### Critical Updates Required - -- `src/specfact_cli/adapters/github.py`: Implement create_issue (in scope) -- `src/specfact_cli/adapters/ado.py`: Implement create_issue (in scope) +- None ### Recommended Updates +- `modules/backlog-core/src/backlog_core/graph/builder.py`: consider reading `.specfact/backlog-config.yaml` first in a follow-up for full consistency. +- docs pages referencing `backlog map-fields` options should include provider-based flow. -- None +### Directly Scanned Dependencies +- `modules/backlog-core/src/backlog_core/commands/add.py` +- `modules/backlog-core/src/backlog_core/graph/builder.py` +- `modules/backlog-core/tests/unit/test_schema_extensions.py` +- `modules/backlog-core/tests/unit/test_add_command.py` +- `tests/unit/commands/test_backlog_commands.py` +- `src/specfact_cli/modules/backlog/src/commands.py` ## Impact Assessment -- **Code Impact**: Additive; new command and adapter method. Existing refine/sync/analyze-deps unchanged. -- **Test Impact**: New tests for create_issue and add command (TDD in tasks). -- **Documentation Impact**: docs/guides/agile-scrum-workflows.md, backlog guide for backlog add workflow. -- **Release Impact**: Minor (new feature). +- **Code Impact**: New backlog config scaffold command and provider-aware map-fields persistence. +- **Test Impact**: New tests required for init-config and github map-fields persistence; existing map-fields tests retained. +- **Documentation Impact**: map-fields and backlog config docs should mention `.specfact/backlog-config.yaml`. +- **Release Impact**: Minor (feature enhancement, backward compatible) -## Dependency on add-backlog-dependency-analysis-and-commands +## User Decision -- **Note**: The plan states this change "Depends on" add-backlog-dependency-analysis-and-commands (BacklogGraphBuilder, fetch_all_issues, fetch_relationships). If that change is not yet merged, implementation can use minimal graph usage (e.g. 
fetch_backlog_item to validate parent exists) as stated in proposal Impact. No ambiguity; design and tasks already allow fallback. +**Decision**: Implement now +**Rationale**: Align backlog provider configuration under dedicated `.specfact/backlog-config.yaml` and keep module metadata in sync with marketplace updates. +**Next Steps**: +1. Implement `specfact backlog init-config` scaffold. +2. Extend `specfact backlog map-fields` for provider selection and provider-specific persistence. +3. Run quality gates (format/type/contract) and targeted tests for modified test modules. ## Format Validation - **proposal.md Format**: Pass - - Title format: Correct (# Change: ...) - - Required sections: All present (Why, What Changes, Capabilities, Impact) - - "What Changes" format: Correct (NEW/EXTEND bullets) - - "Capabilities" section: Present (backlog-add) + - Title format: Correct + - Required sections: All present (`Why`, `What Changes`, `Capabilities`, `Impact`) + - "What Changes" format: Correct + - "Capabilities" section: Present - "Impact" format: Correct - - Source Tracking section: Present (GitHub #173) + - Source Tracking section: Present - **tasks.md Format**: Pass - - Section headers: Hierarchical numbered (## 1. ... ## 10.) 
- - Task format: - [ ] N.N Description - - Sub-task format: Indented - [ ] N.N.N - - Config.yaml compliance: Pass (TDD section, branch first, PR last, version/changelog task, GitHub issue task) -- **specs/backlog-add/spec.md Format**: Pass (ADDED requirements, Given/When/Then) -- **design.md Format**: Pass (bridge adapter, sequence, contract, fallback) + - Section headers: Correct + - Task format: Correct + - Sub-task format: Correct + - Config.yaml compliance: Pass (worktree + testing + quality gate tasks present) +- **specs Format**: Pass + - Given/When/Then format: Verified + - References existing patterns: Verified +- **design.md Format**: Pass + - Bridge adapter integration: Documented + - Sequence diagrams: Not required for this delta +- **Format Issues Found**: 0 +- **Format Issues Fixed**: 0 - **Config.yaml Compliance**: Pass ## OpenSpec Validation - **Status**: Pass -- **Validation Command**: `openspec validate add-backlog-add-interactive-issue-creation --strict` +- **Validation Command**: `openspec validate backlog-core-02-interactive-issue-creation --strict` - **Issues Found**: 0 - **Issues Fixed**: 0 +- **Re-validated**: Yes ## Validation Artifacts -- No temporary workspace used (validation was format and dependency analysis only). -- Breaking change is in-scope (adapter implementations are part of the change). 
- -## Module Architecture Alignment (Re-validated 2026-02-10) - -This change was re-validated after renaming and updating to align with the modular architecture (arch-01 through arch-07): - -- Module package structure updated to `modules/{name}/module-package.yaml` pattern -- CLI command registration moved from `cli.py` to `module-package.yaml` declarations -- Core model modifications replaced with arch-07 schema extensions where applicable -- Adapter protocol extensions use arch-05 bridge registry (no direct mixin modification) -- Publisher and integrity metadata added for arch-06 marketplace readiness -- All old change ID references updated to new module-scoped naming - -**Result**: Pass — format compliant, module architecture aligned, no breaking changes introduced. +- Temporary workspace: `/tmp/specfact-validation-backlog-core-02-1771635189/repo` +- Interface scaffolds: analyzed in-place via additive function diff (`config_schema.py`, `commands.py`, `add.py`) +- Dependency graph: generated from `rg` dependency scans across `src/`, `modules/`, and `tests/` diff --git a/openspec/changes/backlog-core-02-interactive-issue-creation/TDD_EVIDENCE.md b/openspec/changes/backlog-core-02-interactive-issue-creation/TDD_EVIDENCE.md new file mode 100644 index 00000000..ab2d05ee --- /dev/null +++ b/openspec/changes/backlog-core-02-interactive-issue-creation/TDD_EVIDENCE.md @@ -0,0 +1,54 @@ +# TDD Evidence: backlog-core-02-interactive-issue-creation + +## Failing-before Implementation + +- Timestamp: 2026-02-20 23:06:46 +0100 +- Command: + +```bash +hatch test --cover -v modules/backlog-core/tests/unit/test_backlog_protocol.py modules/backlog-core/tests/unit/test_adapter_create_issue.py modules/backlog-core/tests/unit/test_add_command.py +``` + +- Result: **FAILED** (expected at this stage) +- Failure summary: + - `GitHubAdapter` missing `create_issue(...)` + - `AdoAdapter` missing `create_issue(...)` + - `specfact backlog add` command not registered/implemented yet 
(`SystemExit(2)` in command tests) + +## Passing-after Implementation + +- Timestamp: 2026-02-20 23:11:39 +0100 +- Command: + +```bash +hatch test -v modules/backlog-core/tests/unit/test_backlog_protocol.py modules/backlog-core/tests/unit/test_adapter_create_issue.py modules/backlog-core/tests/unit/test_add_command.py +``` + +- Result: **PASSED** (11 passed) + +## Regression Fix Round: Sprint Persistence and Canonical GitHub Created ID + +### Failing-before Implementation + +- Timestamp: 2026-02-22 23:16:44 +0100 +- Command: + +```bash +hatch test -v modules/backlog-core/tests/unit/test_adapter_create_issue.py +``` + +- Result: **FAILED** (expected at this stage) +- Failure summary: + - GitHub `create_issue` returned internal DB id in `id` instead of canonical issue number (`id != key`). + - ADO `create_issue` did not map payload `sprint` to `/fields/System.IterationPath`. + +### Passing-after Implementation + +- Timestamp: 2026-02-22 23:17:07 +0100 +- Command: + +```bash +hatch test -v modules/backlog-core/tests/unit/test_adapter_create_issue.py +``` + +- Result: **PASSED** (5 passed) diff --git a/openspec/changes/backlog-core-02-interactive-issue-creation/proposal.md b/openspec/changes/backlog-core-02-interactive-issue-creation/proposal.md index 6d7cce6a..01589730 100644 --- a/openspec/changes/backlog-core-02-interactive-issue-creation/proposal.md +++ b/openspec/changes/backlog-core-02-interactive-issue-creation/proposal.md @@ -2,56 +2,51 @@ ## Why +After backlog-core-01, teams can analyze dependencies but still create new work items manually in GitHub/ADO. That causes hierarchy drift (wrong parent types), missing readiness fields, and inconsistent sprint/iteration assignment. -After implementing backlog adapters and dependency analysis (backlog-core-01), teams can analyze and sync backlog items but cannot create new issues from the CLI with proper scoping, hierarchy alignment, and Definition of Ready (DoR) checks. 
Without a dedicated add flow, users create issues manually in GitHub/ADO and risk orphaned or misaligned items. Adding `specfact backlog add` enables interactive creation with AI copilot assistance: draft → review → enhance → validate (graph, DoR) → create, so new issues fit the existing backlog structure and value chain. - -This change extends the **`backlog-core` module** (backlog-core-01) with the `backlog add` command. - -## Module Package Structure - -This change adds to the existing `modules/backlog-core/` module: - -``` -modules/backlog-core/ - module-package.yaml # updated: add 'backlog add' to commands list - src/backlog_core/ - commands/ - add.py # specfact backlog add (interactive issue creation) - adapters/ - backlog_protocol.py # extended: add create_issue() to BacklogGraphProtocol -``` - -**`module-package.yaml` update:** Add `backlog add` to commands list. No new module; this is a capability increment to backlog-core. - -## Module Package Structure - -This change adds to the existing `modules/backlog-core/` module: - -``` -modules/backlog-core/ - module-package.yaml # updated: add 'backlog add' to commands list - src/backlog_core/ - commands/ - add.py # specfact backlog add (interactive issue creation) - adapters/ - backlog_protocol.py # extended: add create_issue() to BacklogGraphProtocol -``` - -**`module-package.yaml` update:** Add `backlog add` to commands list. No new module; this is a capability increment to backlog-core. +This change adds `specfact backlog add` as a guided creation workflow in the `backlog` command group, with provider-aware interactive UX and contract-safe adapter writes. ## What Changes - -- **NEW**: Add CLI command `specfact backlog add` in `modules/backlog-core/src/backlog_core/commands/add.py` for interactive creation of backlog issues (epic, feature, story, task, bug, spike) with optional parent, title, body, DoR validation, and optional `--sprint` to assign new issue to sprint (when provider supports it). 
-- **NEW**: Support multiple backlog levels (epic, feature, story, task, bug, spike, custom) with configurable creation hierarchy (allowed parent types per child type) via template or backlog_config; default derived from existing type_mapping and dependency_rules in `ado_scrum.yaml` / `github_projects.yaml` templates. -- **EXTEND** (arch-05 bridge registry): Extend `BacklogGraphProtocol` in `modules/backlog-core/src/backlog_core/adapters/backlog_protocol.py` with `create_issue(project_id: str, payload: dict) -> dict` returning created item (id, key, url). Adapter modules (github-adapter, ado-adapter) implement this method and register updated protocol conformance via bridge registry. -- **NEW**: Add flow: load graph (BacklogGraphBuilder, fetch_all_issues, fetch_relationships from backlog-core-01), resolve type and parent from template/hierarchy, validate parent exists and allowed type, optional DoR check (**use policy-engine-01 when available**), map draft to provider payload, call adapter `create_issue`, output created id/key/url. -- **EXTEND** (E5): Provide draft patch preview before create (integrate with patch-mode-01 when available) so user can review proposed issue body/fields before creating. -- **EXTEND** (E5): When linking to existing issues (e.g. parent, blocks), support fuzzy match + user confirmation; no silent link (aligns with bundle-mapper-01). -- **EXTEND**: Template or backlog_config with optional `creation_hierarchy` (allowed parent types per child type) so Scrum/SAFe/Kanban and custom hierarchies work without code changes. +- **NEW**: `specfact backlog add` in `modules/backlog-core/src/backlog_core/commands/add.py` for interactive and non-interactive issue/work-item creation. +- **EXTEND**: Backlog adapter protocol with `create_issue(project_id: str, payload: dict) -> dict` and concrete implementations in GitHub and ADO adapters. 
+- **EXTEND**: GitHub parent assignment uses native issue relationship metadata (sidebar parent/sub-issue) via GraphQL sub-issue linking, not only body text conventions. +- **NEW**: Configurable creation hierarchy (`creation_hierarchy`) from template/config for parent-type validation (for example epic -> feature -> story -> task). +- **NEW**: Interactive creation UX for required fields including type/title/body, parent selection, sprint/iteration selection, and immediate create-progress feedback. +- **NEW**: Multiline body entry with non-markdown sentinel (default `::END::`) and configurable marker. +- **NEW**: Provider-agnostic draft fields for story-quality capture where applicable: acceptance criteria, priority, story points. +- **NEW**: Description format selection (`markdown` or `classic`) with provider mapping (`ADO multiline format` handling). +- **EXTEND**: GitHub custom mapping parity with ADO behavior: when `.specfact/templates/backlog/field_mappings/github_custom.yaml` exists and `--custom-config` is omitted, `backlog add` auto-loads it; otherwise it falls back to default mappings. +- **EXTEND**: Parent selection behavior: + - ADO: hierarchy-aware parent candidates filtered by allowed parent types. + - GitHub: select from available issues and normalized type mapping (including custom/epic labels when configured). +- **EXTEND**: `specfact backlog map-fields` to support a multi-provider field mapping workflow (ADO + GitHub), including auth checks, provider field discovery, mapping verification, and config persistence in `.specfact/backlog-config.yaml`. For GitHub, issue-type source-of-truth is repository issue types (`repository.issueTypes`), while ProjectV2 Type option mapping is optional enrichment when a suitable Type-like single-select field exists. 
## Capabilities -- **backlog-core** (extended): `backlog add` — interactive creation of backlog issues with type/parent selection, draft validation (graph and DoR), and create via adapter protocol; multi-level support with configurable hierarchy. + +- **backlog-core** (extended): `backlog add` interactive creation flow with hierarchy validation, readiness checks, and adapter-backed create operations. +- **backlog** (extended): Provider-aware `backlog init-config` scaffolding and `backlog map-fields` setup for mapping backlog fields across supported adapters. +- **backlog** (extended): Minimal default backlog-config scaffolding (without empty GitHub ProjectV2 placeholders); persist ProjectV2 mapping only when explicitly configured/discovered. + +## Impact + +- **Affected specs**: `openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-add/spec.md` +- **Affected code**: + - `modules/backlog-core/src/backlog_core/commands/add.py` + - `modules/backlog-core/src/backlog_core/adapters/backlog_protocol.py` + - `modules/backlog-core/src/backlog_core/graph/config_schema.py` + - `modules/backlog-core/src/backlog_core/graph/builder.py` + - `src/specfact_cli/adapters/backlog_base.py` + - `src/specfact_cli/adapters/github.py` + - `src/specfact_cli/adapters/ado.py` + - `src/specfact_cli/modules/backlog/src/commands.py` +- **Affected tests**: + - `modules/backlog-core/tests/unit/test_add_command.py` + - `modules/backlog-core/tests/unit/test_adapter_create_issue.py` + - `modules/backlog-core/tests/unit/test_backlog_protocol.py` +- **Documentation impact**: + - `docs/guides/agile-scrum-workflows.md` + - `docs/reference/commands.md` --- @@ -60,5 +55,6 @@ modules/backlog-core/ - **GitHub Issue**: #173 - **Issue URL**: +- **Repository**: nold-ai/specfact-cli - **Last Synced Status**: proposed - **Sanitized**: false diff --git a/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-add/spec.md 
b/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-add/spec.md index 4246fd9e..ac099b09 100644 --- a/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-add/spec.md +++ b/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-add/spec.md @@ -23,6 +23,7 @@ The system SHALL extend backlog adapters with a create method that accepts a uni - Payload is provider-agnostic (type, title, description, parent_id, optional fields) - Adapter performs provider-specific mapping (e.g. GitHub labels for type, body for description) - Failure (auth, validation) is reported; no silent swallow +- Returned created-item identity uses canonical GitHub issue number for both `id` and `key` so follow-up parent/reference inputs resolve consistently. #### Scenario: Create work item via ADO adapter @@ -38,6 +39,7 @@ The system SHALL extend backlog adapters with a create method that accepts a uni - ADO work item type is derived from unified type via template type_mapping - Parent link is created when parent_id is present and adapter supports it +- When payload includes `sprint`, adapter maps it to `System.IterationPath` in create patch payload. 
### Requirement: Backlog add command @@ -112,7 +114,7 @@ The system SHALL support configurable creation hierarchy (allowed parent types p #### Scenario: Custom hierarchy in backlog_config -**Given**: ProjectBundle.metadata.backlog_config (or .specfact/spec.yaml backlog section) contains creation_hierarchy with entries such as story: [feature, epic], task: [story] +**Given**: ProjectBundle.metadata.backlog_config (or .specfact/backlog-config.yaml) contains creation_hierarchy with entries such as story: [feature, epic], task: [story] **When**: The user adds an item with --type story --parent FEAT-1 @@ -158,3 +160,233 @@ The system SHALL support optional `--sprint ` so the created issue ca **Acceptance Criteria**: - Fuzzy match is used for discovery only; linking requires user confirmation; no silent writes. + +### Requirement: Interactive drafting fields and format selection + +The system SHALL collect story-quality drafting fields during interactive creation where applicable and map them into provider payloads before create. 
+ +#### Scenario: Collect multiline body with non-conflicting sentinel + +**Given**: User runs interactive `specfact backlog add` without `--body` + +**When**: The command prompts for multiline body input + +**Then**: The command accepts multiline text until sentinel marker is entered (default `::END::`) + +**And**: The command shows immediate progress feedback that input capture is complete and creation preparation has started + +#### Scenario: Collect acceptance criteria, priority, and story points for story-like types + +**Given**: User selects a story-like type (story/task/feature where supported) + +**When**: The command asks for quality fields + +**Then**: Acceptance criteria is collected via multiline input + +**And**: Priority and story points are collected (interactive prompts or explicit options) + +**And**: Collected values are included in the create payload where provider supports them + +#### Scenario: Select description format before create + +**Given**: Interactive creation mode + +**When**: The user is prompted for description format (`markdown` or `classic`) + +**Then**: Selected format is included in the payload + +**And**: Provider mapping respects format (for ADO: multiline field format set according to selected mode) + +### Requirement: Interactive sprint/iteration and parent selection + +The system SHALL prompt for sprint/iteration and parent assignment in interactive mode and validate both against provider and hierarchy constraints. 
+ +#### Scenario: Interactive sprint/iteration selection for ADO + +**Given**: ADO adapter can resolve current and available iterations + +**When**: User runs interactive add without `--sprint` + +**Then**: The command shows selectable iteration options (including current and skip) + +**And**: Selected iteration is included in payload + +#### Scenario: Interactive parent selection using hierarchy constraints + +**Given**: Graph and creation hierarchy are loaded + +**When**: User opts to set a parent interactively + +**Then**: Candidate parents are filtered to allowed parent types for selected child type + +**And**: User selects parent from existing items + +**And**: Selected parent id is written as `parent_id` in payload + +#### Scenario: GitHub parent selection reflects mapped type consistency + +**Given**: GitHub issues use label/type mapping and may include custom hierarchy labels (e.g. epic) + +**When**: Parent candidates are presented + +**Then**: Candidate type resolution uses current template type mapping / normalized graph type + +**And**: Parent compatibility follows creation hierarchy rules with no hardcoded provider-only assumptions + + +### Requirement: Centralized retry policy for backlog adapter write operations + +The system SHALL apply a shared retry policy for transient failures in backlog adapter create operations so command behavior is consistent across providers. 
+ +#### Scenario: Retry transient create failure and succeed + +**Given**: A backlog adapter create call receives a transient failure (for example timeout, connection error, HTTP 429, or HTTP 5xx) + +**When**: The command executes `create_issue` + +**Then**: The adapter uses centralized retry logic with bounded attempts and backoff + +**And**: If a later attempt succeeds, the command returns success with created item metadata + +#### Scenario: Non-transient create failure does not retry + +**Given**: A backlog adapter create call fails with non-transient error (for example HTTP 400/401/403/404) + +**When**: The command executes `create_issue` + +**Then**: The adapter does not retry unnecessarily + +**And**: The failure is surfaced immediately to the caller with context + + +#### Scenario: Non-idempotent create avoids ambiguous automatic retry + +**Given**: A create operation is non-idempotent and the transport fails ambiguously (for example timeout/connection drop after request may have reached provider) + +**When**: The adapter executes create via shared retry core logic + +**Then**: The adapter does not automatically replay the create request in that ambiguous state + +**And**: The error is surfaced so caller can verify provider state and retry intentionally + +### Requirement: Adapter-aware default template selection for parent hierarchy + +The system SHALL default template selection by adapter when user does not explicitly pass `--template` so hierarchy/type mapping remains provider-consistent. 
+ +#### Scenario: ADO backlog add defaults to ado_scrum mapping + +**Given**: User runs `specfact backlog add --adapter ado` without `--template` + +**When**: The command builds graph and parent candidates + +**Then**: It uses ADO-compatible template mapping (default `ado_scrum`) + +**And**: Epic/feature/story hierarchy candidates are resolved consistently for parent selection + + +### Requirement: Shared retry policy applied consistently across adapter write operations + +The system SHALL apply centralized retry policy to backlog adapter write operations beyond create, with operation-specific ambiguity safety. + +#### Scenario: Non-idempotent write uses duplicate-safe mode + +**Given**: Adapter operation is non-idempotent (for example comment creation) + +**When**: Shared retry helper is used + +**Then**: Ambiguous transport replay is disabled to avoid duplicate side effects + +#### Scenario: Idempotent update uses bounded transient retry + +**Given**: Adapter operation is idempotent (for example status/body patch) + +**When**: Shared retry helper is used + +**Then**: Transient HTTP failures are retried with bounded backoff + +**And**: Non-transient failures are surfaced immediately + + +### Requirement: Parent candidate discovery must not exclude valid hierarchy parents by implicit sprint defaults + +The system SHALL avoid implicit current-iteration filtering when loading parent candidates for interactive parent selection. 
+ +#### Scenario: ADO parent candidate fetch includes epics without sprint assignment + +**Given**: User creates a feature and opts to choose parent interactively in ADO + +**When**: Parent candidates are loaded for hierarchy filtering + +**Then**: Parent discovery does not implicitly limit candidates to current iteration + +**And**: Epics/features outside current iteration remain selectable when hierarchy allows + +### Requirement: User warning on duplicate-safe ambiguous create failure + +The system SHALL display a user-facing warning when non-idempotent create fails due to ambiguous transport errors while duplicate-safe retry mode is active. + +#### Scenario: Timeout/connection drop on duplicate-safe create + +**Given**: Create uses duplicate-safe mode (no ambiguous replay) + +**When**: Create fails with timeout/connection error + +**Then**: CLI warns the user that the item may have been created remotely + +**And**: CLI advises verifying backlog before retrying manually + + +#### Scenario: ADO sprint selection resolves iterations using project_id context + +**Given**: User runs `backlog add` with `--adapter ado --project-id <org>/<project>` and adapter defaults do not already include org/project + +**When**: Interactive sprint/iteration selection is shown + +**Then**: The command resolves ADO org/project context from project_id for iteration API calls + +**And**: Available iterations are listed for selection when accessible + + +#### Scenario: GitHub backlog add forwards Projects Type field configuration + +**Given**: `backlog add` runs with GitHub adapter and template/custom config contains GitHub Projects v2 type field mapping metadata + +**When**: The command builds create payload for `create_issue` + +**Then**: It forwards provider field metadata in payload (for example `provider_fields.github_project_v2`) so the adapter can set the Projects `Type` field in addition to labels + + +#### Scenario: GitHub ProjectV2 Type mapping can come from repo backlog provider settings + 
+**Given**: `.specfact/backlog-config.yaml` defines `backlog_config.providers.github.settings.provider_fields.github_project_v2` + +**When**: `backlog add` runs with GitHub adapter and no explicit `--custom-config` + +**Then**: The command forwards that provider field configuration in create payload so adapter ProjectV2 Type mapping can run + + +#### Scenario: GitHub add warns when ProjectV2 Type mapping config is absent + +**Given**: User runs `backlog add` with GitHub adapter and no ProjectV2 Type mapping metadata is available + +**When**: The command prepares create payload + +**Then**: The command prints a warning that GitHub ProjectV2 Type field will not be set automatically and labels/body fallback is used + + +#### Scenario: GitHub custom mapping file auto-applies when present + +**Given**: `--adapter github` and no `--custom-config` flag is provided +**And**: `.specfact/templates/backlog/field_mappings/github_custom.yaml` exists +**When**: The user runs `specfact backlog add` +**Then**: The command loads `github_custom.yaml` as custom mapping/hierarchy overrides +**And**: Parent validation and candidate filtering use those overrides +**And**: If the file does not exist, the command falls back to default `github_projects` mapping behavior. + + +#### Scenario: GitHub parent is linked using native sub-issue relationship + +**Given**: A GitHub parent issue is selected during `backlog add` +**When**: The issue is created +**Then**: The adapter links parent/child using GitHub native issue relationship (`addSubIssue`) so the right-sidebar parent relation is populated +**And**: Body text markers are secondary compatibility metadata, not the primary relationship mechanism. 
diff --git a/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-map-fields/spec.md b/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-map-fields/spec.md new file mode 100644 index 00000000..1a6a996d --- /dev/null +++ b/openspec/changes/backlog-core-02-interactive-issue-creation/specs/backlog-map-fields/spec.md @@ -0,0 +1,113 @@ +# Backlog Map Fields (Multi-Provider Mapping Setup) + +## ADDED Requirements + +### Requirement: Backlog-scoped config scaffolding command + +The system SHALL provide a backlog-scoped scaffolding command `specfact backlog init-config` that creates `.specfact/backlog-config.yaml` with safe defaults. + +**Rationale**: Backlog mapping config should live under backlog command ownership and not require manual file creation. + +#### Scenario: Initialize backlog config defaults + +**Given**: User runs `specfact backlog init-config` in a repository + +**When**: `.specfact/backlog-config.yaml` does not exist + +**Then**: The command creates `.specfact/backlog-config.yaml` with minimal provider settings defaults + +**And**: GitHub defaults do not include empty ProjectV2 id/option placeholders; ProjectV2 mapping is written only when configured + +**And**: The command prints next steps for `specfact backlog map-fields` + +#### Scenario: Initialize backlog config without overwrite + +**Given**: `.specfact/backlog-config.yaml` already exists + +**When**: User runs `specfact backlog init-config` without force option + +**Then**: The command does not overwrite existing config and reports how to proceed safely + +### Requirement: Multi-provider map-fields setup workflow + +The system SHALL provide a provider-aware `specfact backlog map-fields` workflow that supports configuring mapping metadata for one or more backlog adapters in a single guided run. + +**Rationale**: Users currently need different setup paths per provider and manual config edits for some providers. 
A unified setup flow prevents missing mappings and hidden fallback behavior. + +#### Scenario: Select providers and run setup sequentially + +**Given**: User runs `specfact backlog map-fields` + +**When**: User selects one or more providers to configure (for example `ado` and `github`) + +**Then**: The command executes setup for each selected provider in sequence + +**And**: The command prints per-provider success/failure status with actionable next steps + +### Requirement: Provider auth and field discovery checks + +The system SHALL verify auth context and discover provider fields/metadata before accepting mappings. + +#### Scenario: ADO mapping setup with API discovery + +**Given**: ADO provider is selected + +**When**: The command validates auth and loads ADO work item fields from API + +**Then**: The user maps required canonical fields to available ADO fields + +**And**: The command validates mapped field ids before saving + +#### Scenario: GitHub ProjectV2 Type mapping setup with API discovery + +**Given**: GitHub provider is selected + +**When**: The command validates auth and loads ProjectV2 metadata (project, Type field, options) + +**Then**: The user maps canonical issue types (`epic`, `feature`, `story`, `task`, `bug`) to ProjectV2 Type options + +**And**: The command validates selected option IDs before saving + +#### Scenario: GitHub issue types are sourced from repository metadata + +**Given**: GitHub provider is selected + +**When**: The command loads repository issue types via GitHub GraphQL (`repository.issueTypes`) + +**Then**: Canonical type mapping is derived from repository issue type names/ids (for example `epic`, `feature`, `story`, `task`, `bug`) + +**And**: This source is preferred over ProjectV2 `Status` options for issue-type identity + +#### Scenario: ProjectV2 type-option mapping is optional when Type field is absent + +**Given**: GitHub ProjectV2 has no Type-like single-select field (for example only `Status`) + +**When**: The user 
runs `specfact backlog map-fields` for GitHub + +**Then**: The command persists repository issue-type mappings and warns that ProjectV2 Type option mapping is skipped + +**And**: The command does not fail solely because ProjectV2 Type options are unavailable + +### Requirement: Canonical config persistence and verification + +The system SHALL persist provider mapping outputs into canonical backlog config and verify integrity post-write. + +#### Scenario: Persist provider mappings into .specfact/backlog-config.yaml + +**Given**: User completes mapping flow for one or more providers + +**When**: The command writes configuration + +**Then**: Mappings are stored under `backlog_config.providers.<provider>.settings` in `.specfact/backlog-config.yaml` + +**And**: Existing unrelated config keys are preserved + +#### Scenario: Post-write verification and summary + +**Given**: Mapping write completes + +**When**: Verification runs + +**Then**: The command confirms required keys are present and prints a concise summary of configured providers + +**And**: If verification fails, the command reports the failing keys and exits non-zero diff --git a/openspec/changes/backlog-core-02-interactive-issue-creation/tasks.md b/openspec/changes/backlog-core-02-interactive-issue-creation/tasks.md index 2238207a..d3ef031b 100644 --- a/openspec/changes/backlog-core-02-interactive-issue-creation/tasks.md +++ b/openspec/changes/backlog-core-02-interactive-issue-creation/tasks.md @@ -14,63 +14,97 @@ Do not implement production code for new behavior until the corresponding tests + ## 1. 
Create git worktree branch from dev -- [ ] 1.1 Ensure primary checkout is on dev and up to date: `git checkout dev && git pull origin dev` -- [ ] 1.2 Create dedicated worktree branch (preferred): `scripts/worktree.sh create feature/backlog-core-02-interactive-issue-creation`; if issue exists, link branch to issue with `gh issue develop 173 --repo nold-ai/specfact-cli --name feature/backlog-core-02-interactive-issue-creation` -- [ ] 1.3 Or create worktree branch without issue link: `scripts/worktree.sh create feature/backlog-core-02-interactive-issue-creation` (if no issue yet) -- [ ] 1.4 Verify branch in worktree: `git worktree list` includes the branch path; then run `git branch --show-current` inside that worktree. +- [x] 1.1 Ensure primary checkout is on dev and up to date: `git checkout dev && git pull origin dev` +- [x] 1.2 Create dedicated worktree branch (preferred): `scripts/worktree.sh create feature/backlog-core-02-interactive-issue-creation`; if issue exists, link branch to issue with `gh issue develop 173 --repo nold-ai/specfact-cli --name feature/backlog-core-02-interactive-issue-creation` +- [x] 1.3 Or create worktree branch without issue link: `scripts/worktree.sh create feature/backlog-core-02-interactive-issue-creation` (if no issue yet) +- [x] 1.4 Verify branch in worktree: `git worktree list` includes the branch path; then run `git branch --show-current` inside that worktree. ## 2. Create GitHub issue in nold-ai/specfact-cli (mandatory) -- [ ] 2.1 If issue not yet created: create issue in nold-ai/specfact-cli: `gh issue create --repo nold-ai/specfact-cli --title "[Change] Add backlog add (interactive issue creation)" --body-file <file> --label "enhancement" --label "change-proposal"`. If issue already exists (e.g. #173), skip and ensure proposal.md Source Tracking is up to date. 
-- [ ] 2.2 Use body from proposal (Why, What Changes, Acceptance Criteria); add footer `*OpenSpec Change Proposal: add-backlog-add-interactive-issue-creation*` -- [ ] 2.3 Update `proposal.md` Source Tracking section with issue number, issue URL, repository nold-ai/specfact-cli, Last Synced Status: proposed +- [x] 2.1 If issue not yet created: create issue in nold-ai/specfact-cli: `gh issue create --repo nold-ai/specfact-cli --title "[Change] Add backlog add (interactive issue creation)" --body-file <file> --label "enhancement" --label "change-proposal"`. If issue already exists (e.g. #173), skip and ensure proposal.md Source Tracking is up to date. +- [x] 2.2 Use body from proposal (Why, What Changes, Acceptance Criteria); add footer `*OpenSpec Change Proposal: add-backlog-add-interactive-issue-creation*` +- [x] 2.3 Update `proposal.md` Source Tracking section with issue number, issue URL, repository nold-ai/specfact-cli, Last Synced Status: proposed - [ ] 2.4 Link issue to project (optional): `gh project item-add 1 --owner nold-ai --url <issue-url>` (requires `gh auth refresh -s project` if needed) ## 3. Verify spec deltas (SDD: specs first) -- [ ] 3.1 Confirm `specs/backlog-add/spec.md` exists and is complete (ADDED requirements, Given/When/Then for create_issue, add command, creation hierarchy). -- [ ] 3.2 Map scenarios to implementation: create via GitHub/ADO, add command with parent validation, custom hierarchy from config, non-interactive mode. +- [x] 3.1 Confirm `specs/backlog-add/spec.md` exists and is complete (ADDED requirements, Given/When/Then for create_issue, add command, creation hierarchy). +- [x] 3.2 Map scenarios to implementation: create via GitHub/ADO, add command with parent validation, custom hierarchy from config, non-interactive mode. +- [x] 3.3 Confirm `specs/backlog-map-fields/spec.md` is complete for multi-provider map-fields setup behavior. ## 4. 
Tests first (TDD: write tests from spec scenarios; expect failure) -- [ ] 4.1 Write unit tests for adapter create_issue: mock GitHub/ADO API; assert payload mapping and return shape (id, key, url). -- [ ] 4.2 Write unit or integration tests from `specs/backlog-add/spec.md` scenarios: add with parent validation, hierarchy from config, non-interactive add, DoR check when --check-dor. -- [ ] 4.3 Run tests: `hatch run smart-test-unit` (or equivalent); **expect failure** (no implementation yet). -- [ ] 4.4 Document which scenarios are covered by which test modules. +- [x] 4.1 Write unit tests for adapter create_issue: mock GitHub/ADO API; assert payload mapping and return shape (id, key, url). +- [x] 4.2 Write unit/integration tests from `specs/backlog-add/spec.md` scenarios: parent validation, hierarchy rules, non-interactive add, DoR check, multiline body sentinel, description format selection, and ADO sprint/iteration selection. +- [x] 4.3 Run tests: `hatch run smart-test-unit` (or equivalent); **expect failure** (no implementation yet). +- [x] 4.4 Document which scenarios are covered by which test modules. +- [x] 4.5 Add unit tests for centralized retry behavior (retries on transient failures, no retry on non-transient failures). +- [x] 4.6 Add regression tests for duplicate-safe create retry behavior and ADO parent candidate resolution when template is omitted. +- [x] 4.7 Add regression tests for shared retry policy usage in additional write paths (non-idempotent comments and idempotent updates). +- [x] 4.8 Add regression tests for ADO parent candidate fetch without implicit sprint default and duplicate-safe create warning behavior. +- [x] 4.9 Add regression test for ADO sprint option discovery with project_id-resolved context. +- [x] 4.10 Add regression test for backlog add provider_fields forwarding for GitHub ProjectV2 Type field updates. 
+- [x] 4.11 Add regression test for backlog-config.yaml provider settings forwarding of GitHub ProjectV2 Type mapping metadata. +- [x] 4.12 Add regression test for missing GitHub ProjectV2 config warning in backlog add output. +- [x] 4.13 Add regression tests for multi-provider map-fields flow (provider selection, auth/discovery checks, config persistence, verification output). +- [x] 4.14 Add regression tests for `backlog init-config` scaffolding behavior (create, no-overwrite, force/override path). +- [x] 4.15 Add regression tests for GitHub repository issue-type discovery and fallback behavior when ProjectV2 has only Status field. +- [x] 4.16 Add regression tests ensuring ADO `create_issue` persists `sprint` to `System.IterationPath` and GitHub `create_issue` returns canonical issue-number identity (`id == key == number`). ## 5. Extend BacklogAdapterMixin with create_issue (TDD: code until tests pass) -- [ ] 5.1 Add abstract method `create_issue(project_id: str, payload: dict) -> dict` to `BacklogAdapterMixin` in `src/specfact_cli/adapters/backlog_base.py` with @abstractmethod, @beartype, and @icontract. -- [ ] 5.2 Implement `create_issue` in GitHub adapter: map payload to GitHub Issues API (POST /repos/{owner}/{repo}/issues); return dict with id, key (number), url. -- [ ] 5.3 Implement `create_issue` in ADO adapter: map payload to ADO Create Work Item API; set parent relation when parent_id present; return dict with id, key, url. -- [ ] 5.4 Run adapter create tests; **expect pass**; fix until tests pass. +- [x] 5.1 Add abstract method `create_issue(project_id: str, payload: dict) -> dict` to `BacklogAdapterMixin` in `src/specfact_cli/adapters/backlog_base.py` with @abstractmethod, @beartype, and @icontract. +- [x] 5.2 Implement `create_issue` in GitHub adapter: map payload to GitHub Issues API (POST /repos/{owner}/{repo}/issues); return dict with id, key (number), url. 
+- [x] 5.3 Implement `create_issue` in ADO adapter: map payload to ADO Create Work Item API; set parent relation when parent_id present; return dict with id, key, url. +- [x] 5.4 Run adapter create tests; **expect pass**; fix until tests pass. +- [x] 5.5 Fix create-issue regressions: map ADO sprint payload to `System.IterationPath` and normalize GitHub create return identity to issue number. ## 6. Implement creation hierarchy and add command (TDD: code until tests pass) -- [ ] 6.1 Define optional creation_hierarchy in template or backlog_config schema (child type → list of allowed parent types); implement loader (from ProjectBundle.metadata.backlog_config or .specfact/spec.yaml). -- [ ] 6.2 Implement add command: options --adapter, --project-id, --template, --type, --parent, --title, --body, --non-interactive, --check-dor; interactive prompts when key args missing (unless --non-interactive). -- [ ] 6.3 Implement flow: load graph (fetch_all_issues + fetch_relationships or BacklogGraphBuilder when available); resolve type and parent; validate parent exists and allowed type from creation_hierarchy; optional DoR check (reuse backlog refine DoR); build payload; call adapter.create_issue; output id, key, url. -- [ ] 6.4 Register `specfact backlog add` in backlog command group (same place as refine, analyze-deps). -- [ ] 6.5 Run add-command tests; **expect pass**; fix until tests pass. +- [x] 6.1 Define optional creation_hierarchy in template or backlog_config schema (child type → list of allowed parent types); implement loader (from ProjectBundle.metadata.backlog_config or .specfact/backlog-config.yaml). +- [x] 6.2 Implement add command: options --adapter, --project-id, --template, --type, --parent, --title, --body, --non-interactive, --check-dor; interactive prompts when key args missing (unless --non-interactive). 
+- [x] 6.3 Implement flow: load graph (fetch_all_issues + fetch_relationships or BacklogGraphBuilder when available); resolve type and parent; validate parent exists and allowed type from creation_hierarchy; optional DoR check (reuse backlog refine DoR); build payload; call adapter.create_issue; output id, key, url. +- [x] 6.4 Register `specfact backlog add` in backlog command group (same place as refine, analyze-deps). +- [x] 6.5 Run add-command tests; **expect pass**; fix until tests pass. +- [x] 6.6 Add interactive field collection where appropriate: acceptance criteria (multiline), priority, story points; map to provider payload fields when supported. +- [x] 6.7 Add interactive sprint/iteration selection (ADO) and explicit progress messages after multiline input capture and before create API call. +- [x] 6.8 Add interactive parent assignment flow: ask whether to set parent, then choose from hierarchy-allowed existing issues; apply provider-aware type mapping (including GitHub custom/epic labels via mapping). +- [x] 6.9 Add centralized retry policy in backlog adapter core logic and route GitHub/ADO create operations through it (retry transient failures only). +- [x] 6.10 Guard non-idempotent create operations against ambiguous automatic replay on timeout/connection failure to prevent duplicates. +- [x] 6.11 Resolve adapter-aware default template (ADO -> ado_scrum, GitHub -> github_projects) when --template is not provided. +- [x] 6.12 Apply shared retry policy to additional adapter write operations with per-operation ambiguity safety (non-idempotent vs idempotent). +- [x] 6.13 Disable implicit current-iteration filtering for parent candidate discovery flows (ADO) so hierarchy-valid parents are not hidden. +- [x] 6.14 Add duplicate-safe create failure warning in CLI for ambiguous transport errors (verify backlog before manual retry). +- [x] 6.15 Bind ADO org/project context before interactive sprint lookup so iteration options are discoverable from project_id. 
+- [x] 6.16 Forward GitHub Projects-v2 Type field configuration from template/custom config into create payload provider_fields. +- [x] 6.17 Resolve GitHub ProjectV2 provider field config from .specfact/backlog-config.yaml backlog provider settings when custom config is not provided. +- [x] 6.18 Add user-facing warning when GitHub ProjectV2 Type mapping config is missing or incomplete. +- [x] 6.19 Extend backlog map-fields into multi-provider guided setup (provider selection and sequential execution). +- [x] 6.20 Implement GitHub ProjectV2 discovery and type-option mapping flow in map-fields. +- [x] 6.21 Persist map-fields outputs into `.specfact/backlog-config.yaml` provider settings and verify required keys post-write. +- [x] 6.22 Add `specfact backlog init-config` command to scaffold `.specfact/backlog-config.yaml` defaults under backlog scope. +- [x] 6.23 Use GitHub repository issue types as source-of-truth in map-fields; keep ProjectV2 Type mapping optional when field/options are unavailable. +- [x] 6.24 Auto-load `.specfact/templates/backlog/field_mappings/github_custom.yaml` for `backlog add` when `--adapter github` and `--custom-config` is omitted; fall back to defaults when absent. +- [x] 6.25 Link GitHub parent selection using native issue relationship (`addSubIssue`) so parent appears in issue sidebar metadata. ## 7. Quality gates -- [ ] 7.1 Run format and type-check: `hatch run format`, `hatch run type-check`. -- [ ] 7.2 Run contract test: `hatch run contract-test`. -- [ ] 7.3 Run full test suite: `hatch run smart-test` (or `hatch run smart-test-full`). -- [ ] 7.4 Ensure all new public APIs have @icontract and @beartype where applicable. +- [x] 7.1 Run format and type-check: `hatch run format`, `hatch run type-check`. +- [x] 7.2 Run contract test: `hatch run contract-test`. +- [x] 7.3 Run full test suite: `hatch run smart-test` (or `hatch run smart-test-full`). +- [x] 7.4 Ensure all new public APIs have @icontract and @beartype where applicable. ## 8. 
Documentation research and review -- [ ] 8.1 Identify affected documentation: docs/guides/agile-scrum-workflows.md, backlog-refinement or backlog guide. -- [ ] 8.2 Update agile-scrum-workflows (or backlog guide): add section for backlog add (`specfact backlog add`), interactive creation, DoR, slash prompt usage. +- [x] 8.1 Identify affected documentation: docs/guides/agile-scrum-workflows.md, backlog-refinement or backlog guide. +- [x] 8.2 Update agile-scrum-workflows (or backlog guide): add section for backlog add (`specfact backlog add`), interactive creation, DoR, slash prompt usage. - [ ] 8.3 If adding a new doc page: set front-matter (layout, title, permalink, description) and update docs/_layouts/default.html sidebar if needed. ## 9. Version and changelog (patch bump; required before PR) -- [ ] 9.1 Bump **patch** version in `pyproject.toml` (e.g. X.Y.Z → X.Y.(Z+1)). -- [ ] 9.2 Sync version in `setup.py`, `src/__init__.py`, `src/specfact_cli/__init__.py` to match pyproject.toml. -- [ ] 9.3 Add CHANGELOG.md entry under new [X.Y.Z] - YYYY-MM-DD section: **Added** – Backlog add (interactive issue creation): `specfact backlog add` with type/parent selection, DoR validation, and create via adapter. +- [x] 9.1 Bump **patch** version in `pyproject.toml` (e.g. X.Y.Z → X.Y.(Z+1)). +- [x] 9.2 Sync version in `setup.py`, `src/__init__.py`, `src/specfact_cli/__init__.py` to match pyproject.toml. +- [x] 9.3 Add CHANGELOG.md entry under new [X.Y.Z] - YYYY-MM-DD section: **Added** – Backlog add (interactive issue creation): `specfact backlog add` with type/parent selection, DoR validation, and create via adapter. ## 10. Create Pull Request to dev diff --git a/pyproject.toml b/pyproject.toml index 12452a7e..efcad9b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "specfact-cli" -version = "0.35.0" +version = "0.36.0" description = "The swiss knife CLI for agile DevOps teams. 
Keep backlog, specs, tests, and code in sync with validation and contract enforcement for new projects and long-lived codebases." readme = "README.md" requires-python = ">=3.11" diff --git a/setup.py b/setup.py index 94c550cc..62a8c181 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ if __name__ == "__main__": _setup = setup( name="specfact-cli", - version="0.35.0", + version="0.36.0", description=( "The swiss knife CLI for agile DevOps teams. Keep backlog, specs, tests, and code in sync with " "validation and contract enforcement for new projects and long-lived codebases." diff --git a/src/__init__.py b/src/__init__.py index d82b7ce8..ed7b37cf 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -3,4 +3,4 @@ """ # Package version: keep in sync with pyproject.toml, setup.py, src/specfact_cli/__init__.py -__version__ = "0.35.0" +__version__ = "0.36.0" diff --git a/src/specfact_cli/__init__.py b/src/specfact_cli/__init__.py index b44ef2f6..ec9195f7 100644 --- a/src/specfact_cli/__init__.py +++ b/src/specfact_cli/__init__.py @@ -8,6 +8,6 @@ - Supporting agile ceremonies and team workflows """ -__version__ = "0.35.0" +__version__ = "0.36.0" __all__ = ["__version__"] diff --git a/src/specfact_cli/adapters/ado.py b/src/specfact_cli/adapters/ado.py index d4550ddf..ab30030d 100644 --- a/src/specfact_cli/adapters/ado.py +++ b/src/specfact_cli/adapters/ado.py @@ -1657,7 +1657,10 @@ def _create_work_item_from_proposal( ] try: - response = requests.patch(url, json=patch_document, headers=headers, timeout=30) + response = self._request_with_retry( + lambda: requests.patch(url, json=patch_document, headers=headers, timeout=30), + retry_on_ambiguous_transport=False, + ) if is_debug_mode(): debug_log_operation( "ado_patch", @@ -1800,8 +1803,9 @@ def _update_work_item_status( patch_document = [{"op": "replace", "path": "/fields/System.State", "value": ado_state}] try: - response = requests.patch(url, json=patch_document, headers=headers, timeout=30) - 
response.raise_for_status() + response = self._request_with_retry( + lambda: requests.patch(url, json=patch_document, headers=headers, timeout=30) + ) work_item_data = response.json() work_item_url = work_item_data.get("_links", {}).get("html", {}).get("href", "") @@ -1933,8 +1937,9 @@ def _update_work_item_body( ] try: - response = requests.patch(url, json=patch_document, headers=headers, timeout=30) - response.raise_for_status() + response = self._request_with_retry( + lambda: requests.patch(url, json=patch_document, headers=headers, timeout=30) + ) work_item_data = response.json() work_item_url = work_item_data.get("_links", {}).get("html", {}).get("href", "") @@ -2040,8 +2045,9 @@ def sync_status_to_ado( patch_document = [{"op": "replace", "path": "/fields/System.State", "value": ado_state}] try: - response = requests.patch(url, json=patch_document, headers=headers, timeout=30) - response.raise_for_status() + response = self._request_with_retry( + lambda: requests.patch(url, json=patch_document, headers=headers, timeout=30) + ) work_item_data = response.json() work_item_url = work_item_data.get("_links", {}).get("html", {}).get("href", "") @@ -2442,8 +2448,10 @@ def _add_work_item_comment( comment_body = {"text": comment_text} try: - response = requests.post(url, json=comment_body, headers=headers, timeout=30) - response.raise_for_status() + response = self._request_with_retry( + lambda: requests.post(url, json=comment_body, headers=headers, timeout=30), + retry_on_ambiguous_transport=False, + ) comment_data = response.json() comment_id = comment_data.get("id") @@ -2690,6 +2698,7 @@ def _resolve_sprint_filter( self, sprint_filter: str | None, items: list[BacklogItem], + apply_current_when_missing: bool = True, ) -> tuple[str | None, list[BacklogItem]]: """ Resolve sprint filter with path matching and ambiguity detection. 
@@ -2705,6 +2714,8 @@ def _resolve_sprint_filter( ValueError: If ambiguous sprint name match is detected """ if not sprint_filter: + if not apply_current_when_missing: + return None, items # No sprint filter - try to get current iteration current_iteration = self._get_current_iteration() if current_iteration: @@ -2859,13 +2870,16 @@ def fetch_backlog_items(self, filters: BacklogFilters) -> list[BacklogItem]: # Sprint will be resolved post-fetch to handle ambiguity pass else: - # No sprint/iteration - try current iteration - current_iteration = self._get_current_iteration() - if current_iteration: - resolved_iteration = current_iteration - conditions.append(f"[System.IterationPath] = '{resolved_iteration}'") - else: - console.print("[yellow]⚠ No current iteration found and no sprint/iteration filter provided[/yellow]") + # No sprint/iteration - optionally use current iteration default + if getattr(filters, "use_current_iteration_default", True): + current_iteration = self._get_current_iteration() + if current_iteration: + resolved_iteration = current_iteration + conditions.append(f"[System.IterationPath] = '{resolved_iteration}'") + else: + console.print( + "[yellow]⚠ No current iteration found and no sprint/iteration filter provided[/yellow]" + ) if conditions: wiql_parts.append("AND " + " AND ".join(conditions)) @@ -3113,7 +3127,11 @@ def fetch_backlog_items(self, filters: BacklogFilters) -> list[BacklogItem]: # Sprint filtering with path matching and ambiguity detection if filters.sprint: try: - _, filtered_items = self._resolve_sprint_filter(filters.sprint, filtered_items) + _, filtered_items = self._resolve_sprint_filter( + filters.sprint, + filtered_items, + apply_current_when_missing=getattr(filters, "use_current_iteration_default", True), + ) except ValueError as e: # Ambiguous sprint match - raise with clear error message console.print(f"[red]Error:[/red] {e}") @@ -3137,6 +3155,115 @@ def fetch_backlog_items(self, filters: BacklogFilters) -> 
list[BacklogItem]: return filtered_items + @beartype + @require( + lambda project_id: isinstance(project_id, str) and len(project_id.strip()) > 0, "project_id must be non-empty" + ) + @require(lambda payload: isinstance(payload, dict), "payload must be dict") + @ensure(lambda result: isinstance(result, dict), "Must return dict") + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + """Create an Azure DevOps work item from provider-agnostic backlog payload.""" + org, project = self._resolve_graph_project_context(project_id) + if not self.api_token: + raise ValueError("Azure DevOps API token is required") + + title = str(payload.get("title") or "").strip() + if not title: + raise ValueError("payload.title is required") + + raw_type = str(payload.get("type") or "task").strip().lower() + type_mapping = { + "epic": "Epic", + "feature": "Feature", + "story": "User Story", + "user story": "User Story", + "task": "Task", + "bug": "Bug", + "spike": "Task", + } + work_item_type = type_mapping.get(raw_type, "Task") + + description = str(payload.get("description") or payload.get("body") or "").strip() + description_format = str(payload.get("description_format") or "markdown").strip().lower() + field_rendering_format = "Markdown" if description_format != "classic" else "Html" + patch_document: list[dict[str, Any]] = [ + {"op": "add", "path": "/fields/System.Title", "value": title}, + {"op": "add", "path": "/fields/System.Description", "value": description}, + {"op": "add", "path": "/multilineFieldsFormat/System.Description", "value": field_rendering_format}, + ] + + acceptance_criteria = str(payload.get("acceptance_criteria") or "").strip() + if acceptance_criteria: + patch_document.append( + { + "op": "add", + "path": "/fields/Microsoft.VSTS.Common.AcceptanceCriteria", + "value": acceptance_criteria, + } + ) + + priority = payload.get("priority") + if priority not in (None, ""): + patch_document.append( + { + "op": "add", + "path": 
"/fields/Microsoft.VSTS.Common.Priority", + "value": priority, + } + ) + + story_points = payload.get("story_points") + if story_points is not None: + patch_document.append( + { + "op": "add", + "path": "/fields/Microsoft.VSTS.Scheduling.StoryPoints", + "value": story_points, + } + ) + + sprint = str(payload.get("sprint") or "").strip() + if sprint: + patch_document.append( + { + "op": "add", + "path": "/fields/System.IterationPath", + "value": sprint, + } + ) + + parent_id = str(payload.get("parent_id") or "").strip() + if parent_id: + parent_url = f"{self.base_url}/{org}/{project}/_apis/wit/workItems/{parent_id}" + patch_document.append( + { + "op": "add", + "path": "/relations/-", + "value": {"rel": "System.LinkTypes.Hierarchy-Reverse", "url": parent_url}, + } + ) + + url = f"{self.base_url}/{org}/{project}/_apis/wit/workitems/${work_item_type}?api-version=7.1" + headers = { + "Content-Type": "application/json-patch+json", + **self._auth_headers(), + } + response = self._request_with_retry( + lambda: requests.patch(url, json=patch_document, headers=headers, timeout=30), + retry_on_ambiguous_transport=False, + ) + created = response.json() + + created_id = str(created.get("id") or "") + html_url = str(created.get("_links", {}).get("html", {}).get("href") or "") + fallback_url = str(created.get("url") or "") + + return { + "id": created_id, + "key": created_id, + "url": html_url or fallback_url, + } + @beartype @require(lambda project_id: isinstance(project_id, str) and len(project_id) > 0, "project_id must be non-empty") @ensure(lambda result: isinstance(result, list), "Must return list") @@ -3429,8 +3556,9 @@ def update_backlog_item(self, item: BacklogItem, update_fields: list[str] | None # Update work item try: - response = requests.patch(url, headers=headers, json=operations, timeout=30) - response.raise_for_status() + response = self._request_with_retry( + lambda: requests.patch(url, headers=headers, json=operations, timeout=30) + ) except requests.HTTPError 
as e: user_msg = _log_ado_patch_failure(e.response, operations, url) e.ado_user_message = user_msg diff --git a/src/specfact_cli/adapters/backlog_base.py b/src/specfact_cli/adapters/backlog_base.py index 111fb073..25b94275 100644 --- a/src/specfact_cli/adapters/backlog_base.py +++ b/src/specfact_cli/adapters/backlog_base.py @@ -11,10 +11,12 @@ from __future__ import annotations +import time from abc import ABC, abstractmethod from datetime import UTC, datetime from typing import Any +import requests from beartype import beartype from icontract import ensure, require @@ -35,6 +37,10 @@ class BacklogAdapterMixin(ABC): and implement the abstract methods to provide tool-specific implementations. """ + RETRYABLE_HTTP_STATUSES: tuple[int, ...] = (429, 500, 502, 503, 504) + RETRY_DEFAULT_ATTEMPTS: int = 3 + RETRY_BACKOFF_SECONDS: float = 0.5 + @abstractmethod @beartype @require(lambda status: isinstance(status, str) and len(status) > 0, "Status must be non-empty string") @@ -140,6 +146,71 @@ def map_backlog_state_between_adapters( return target_state + @beartype + @require(lambda attempts: attempts is None or attempts > 0, "attempts must be > 0 when provided") + @require( + lambda backoff_seconds: backoff_seconds is None or backoff_seconds >= 0, + "backoff_seconds must be >= 0 when provided", + ) + @require( + lambda retry_on_ambiguous_transport: isinstance(retry_on_ambiguous_transport, bool), "retry flag must be bool" + ) + @ensure(lambda result: hasattr(result, "raise_for_status"), "Result must support raise_for_status") + def _request_with_retry( + self, + request_callable: Any, + *, + attempts: int | None = None, + backoff_seconds: float | None = None, + retry_on_ambiguous_transport: bool = True, + ) -> Any: + """Execute HTTP request with central retry policy for transient failures. + + For non-idempotent writes, callers can disable transport-error replay by passing + retry_on_ambiguous_transport=False to avoid accidental duplicate side effects. 
+ """ + max_attempts = attempts or self.RETRY_DEFAULT_ATTEMPTS + delay = backoff_seconds if backoff_seconds is not None else self.RETRY_BACKOFF_SECONDS + + last_error: Exception | None = None + for attempt in range(1, max_attempts + 1): + try: + response = request_callable() + status_code = int(getattr(response, "status_code", 0) or 0) + if status_code in self.RETRYABLE_HTTP_STATUSES and attempt < max_attempts: + time.sleep(delay * (2 ** (attempt - 1))) + continue + response.raise_for_status() + return response + except requests.HTTPError as error: + status_code = int(getattr(error.response, "status_code", 0) or 0) + is_transient = status_code in self.RETRYABLE_HTTP_STATUSES + last_error = error + if is_transient and attempt < max_attempts: + time.sleep(delay * (2 ** (attempt - 1))) + continue + raise + except (requests.Timeout, requests.ConnectionError) as error: + last_error = error + if retry_on_ambiguous_transport and attempt < max_attempts: + time.sleep(delay * (2 ** (attempt - 1))) + continue + raise + + if last_error is not None: + raise last_error + raise RuntimeError("Retry logic failed without response or error") + + @abstractmethod + @beartype + @require( + lambda project_id: isinstance(project_id, str) and len(project_id.strip()) > 0, "Project ID must be non-empty" + ) + @require(lambda payload: isinstance(payload, dict), "Payload must be dict") + @ensure(lambda result: isinstance(result, dict), "Must return created issue metadata dict") + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + """Create backlog issue/work item from provider-agnostic payload.""" + @abstractmethod @beartype @require(lambda item_data: isinstance(item_data, dict), "Item data must be dict") diff --git a/src/specfact_cli/adapters/github.py b/src/specfact_cli/adapters/github.py index 8296a86b..0d74b7c7 100644 --- a/src/specfact_cli/adapters/github.py +++ b/src/specfact_cli/adapters/github.py @@ -1161,8 +1161,10 @@ def _create_issue_from_proposal( 
payload["state_reason"] = state_reason try: - response = requests.post(url, json=payload, headers=headers, timeout=30) - response.raise_for_status() + response = self._request_with_retry( + lambda: requests.post(url, json=payload, headers=headers, timeout=30), + retry_on_ambiguous_transport=False, + ) issue_data = response.json() # If issue was created as closed, add a comment explaining why @@ -1281,8 +1283,7 @@ def _update_issue_status( payload["state_reason"] = state_reason try: - response = requests.patch(url, json=payload, headers=headers, timeout=30) - response.raise_for_status() + response = self._request_with_retry(lambda: requests.patch(url, json=payload, headers=headers, timeout=30)) issue_data = response.json() # Add comment explaining status change @@ -1346,8 +1347,10 @@ def _add_issue_comment(self, repo_owner: str, repo_name: str, issue_number: int, payload = {"body": comment} try: - response = requests.post(url, json=payload, headers=headers, timeout=30) - response.raise_for_status() + self._request_with_retry( + lambda: requests.post(url, json=payload, headers=headers, timeout=30), + retry_on_ambiguous_transport=False, + ) except requests.RequestException as e: # Log but don't fail - comment is non-critical console.print(f"[yellow]⚠[/yellow] Failed to add comment to issue #{issue_number}: {e}") @@ -1509,8 +1512,7 @@ def _update_issue_body( payload["state_reason"] = state_reason try: - response = requests.patch(url, json=payload, headers=headers, timeout=30) - response.raise_for_status() + response = self._request_with_retry(lambda: requests.patch(url, json=payload, headers=headers, timeout=30)) issue_data = response.json() # Add comment if issue was closed due to status change, or if already closed with applied status @@ -1684,8 +1686,7 @@ def sync_status_to_github( patch_url = f"{self.base_url}/repos/{repo_owner}/{repo_name}/issues/{issue_number}" patch_payload = {"labels": all_labels} - patch_response = requests.patch(patch_url, json=patch_payload, 
headers=headers, timeout=30) - patch_response.raise_for_status() + self._request_with_retry(lambda: requests.patch(patch_url, json=patch_payload, headers=headers, timeout=30)) return { "issue_number": current_issue.get("number", issue_number), # Use API response number (int) @@ -2639,6 +2640,250 @@ def fetch_backlog_items(self, filters: BacklogFilters) -> list[BacklogItem]: return filtered_items + @beartype + def _github_graphql(self, query: str, variables: dict[str, Any]) -> dict[str, Any]: + """Execute GitHub GraphQL request and return `data` payload.""" + headers = { + "Authorization": f"token {self.api_token}", + "Accept": "application/vnd.github+json", + } + response = self._request_with_retry( + lambda: requests.post( + f"{self.base_url}/graphql", + json={"query": query, "variables": variables}, + headers=headers, + timeout=30, + ) + ) + payload = response.json() + if not isinstance(payload, dict): + raise ValueError("GitHub GraphQL response must be an object") + errors = payload.get("errors") + if isinstance(errors, list) and errors: + raise ValueError(f"GitHub GraphQL errors: {errors}") + data = payload.get("data") + return data if isinstance(data, dict) else {} + + @beartype + def _try_set_github_issue_type( + self, + issue_node_id: str, + issue_type: str, + provider_fields: dict[str, Any] | None, + ) -> None: + """Best-effort GitHub issue type update using repository issue-type ids.""" + if not issue_node_id or not isinstance(provider_fields, dict): + return + + issue_cfg = provider_fields.get("github_issue_types") + if not isinstance(issue_cfg, dict): + return + type_ids = issue_cfg.get("type_ids") + if not isinstance(type_ids, dict): + return + + issue_type_id = str(type_ids.get(issue_type) or type_ids.get(issue_type.lower()) or "").strip() + if not issue_type_id: + return + + mutation = ( + "mutation($issueId: ID!, $issueTypeId: ID!) 
{ " + "updateIssue(input: {id: $issueId, issueTypeId: $issueTypeId}) { issue { id } } " + "}" + ) + try: + self._github_graphql( + mutation, + {"issueId": issue_node_id, "issueTypeId": issue_type_id}, + ) + except (requests.RequestException, ValueError) as error: + console.print(f"[yellow]⚠[/yellow] Could not set GitHub issue Type automatically: {error}") + + @beartype + def _try_link_github_sub_issue( + self, + owner: str, + repo: str, + parent_ref: Any, + sub_issue_node_id: str, + ) -> None: + """Best-effort native GitHub parent/sub-issue link using sidebar relationship.""" + if not sub_issue_node_id: + return + + parent_raw = str(parent_ref or "").strip() + if not parent_raw: + return + + parent_number_text = parent_raw.removeprefix("#") + if not parent_number_text.isdigit(): + return + parent_number = int(parent_number_text) + + parent_query = ( + "query($owner:String!, $repo:String!, $number:Int!) { " + "repository(owner:$owner, name:$repo) { issue(number:$number) { id } } " + "}" + ) + link_mutation = ( + "mutation($parentIssueId:ID!, $subIssueId:ID!) 
{ " + "addSubIssue(input:{ issueId:$parentIssueId, subIssueId:$subIssueId, replaceParent:true }) { " + "issue { id } subIssue { id } " + "} " + "}" + ) + + try: + parent_data = self._github_graphql( + parent_query, + {"owner": owner, "repo": repo, "number": parent_number}, + ) + repository = parent_data.get("repository") if isinstance(parent_data, dict) else None + issue = repository.get("issue") if isinstance(repository, dict) else None + parent_issue_id = str(issue.get("id") or "").strip() if isinstance(issue, dict) else "" + if not parent_issue_id: + return + self._github_graphql( + link_mutation, + {"parentIssueId": parent_issue_id, "subIssueId": sub_issue_node_id}, + ) + except (requests.RequestException, ValueError) as error: + console.print(f"[yellow]⚠[/yellow] Could not create native GitHub parent/sub-issue link: {error}") + + def _try_set_github_project_type_field( + self, + issue_node_id: str, + issue_type: str, + provider_fields: dict[str, Any] | None, + ) -> None: + """Best-effort GitHub Projects v2 Type field update for created issues.""" + if not issue_node_id or not isinstance(provider_fields, dict): + return + + project_cfg = provider_fields.get("github_project_v2") + if not isinstance(project_cfg, dict): + return + + project_id = str(project_cfg.get("project_id") or "").strip() + type_field_id = str(project_cfg.get("type_field_id") or "").strip() + option_map = project_cfg.get("type_option_ids") + if not isinstance(option_map, dict): + return + + option_id = str(option_map.get(issue_type) or option_map.get(issue_type.lower()) or "").strip() + if not project_id or not type_field_id or not option_id: + return + + add_item_mutation = ( + "mutation($projectId: ID!, $contentId: ID!) { " + "addProjectV2ItemById(input: {projectId: $projectId, contentId: $contentId}) { item { id } }" + " }" + ) + set_type_mutation = ( + "mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $optionId: String!) 
{ " + "updateProjectV2ItemFieldValue(input: {" + "projectId: $projectId, itemId: $itemId, fieldId: $fieldId, " + "value: { singleSelectOptionId: $optionId }" + "}) { projectV2Item { id } }" + " }" + ) + + try: + add_data = self._github_graphql( + add_item_mutation, + {"projectId": project_id, "contentId": issue_node_id}, + ) + add_result = add_data.get("addProjectV2ItemById") if isinstance(add_data, dict) else None + item = add_result.get("item") if isinstance(add_result, dict) else None + item_id = str(item.get("id") or "").strip() if isinstance(item, dict) else "" + if not item_id: + return + self._github_graphql( + set_type_mutation, + { + "projectId": project_id, + "itemId": item_id, + "fieldId": type_field_id, + "optionId": option_id, + }, + ) + except (requests.RequestException, ValueError) as error: + console.print(f"[yellow]⚠[/yellow] Could not set GitHub Projects Type field automatically: {error}") + + @beartype + @require( + lambda project_id: isinstance(project_id, str) and len(project_id.strip()) > 0, "project_id must be non-empty" + ) + @require(lambda payload: isinstance(payload, dict), "payload must be dict") + @ensure(lambda result: isinstance(result, dict), "Must return dict") + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + """Create a GitHub issue from provider-agnostic backlog payload.""" + owner, repo = project_id.split("/", 1) if "/" in project_id else (self.repo_owner, self.repo_name) + if not owner or not repo: + raise ValueError( + "GitHub project_id must be '/' or adapter must be configured with repo_owner/repo_name" + ) + if not self.api_token: + raise ValueError("GitHub API token required to create issues") + + title = str(payload.get("title") or "").strip() + if not title: + raise ValueError("payload.title is required") + + issue_type = str(payload.get("type") or "task").strip().lower() + description_format = str(payload.get("description_format") or "markdown").strip().lower() + body = 
str(payload.get("description") or payload.get("body") or "").strip() + + acceptance_criteria = str(payload.get("acceptance_criteria") or "").strip() + if acceptance_criteria: + if description_format == "classic": + body = f"{body}\n\nAcceptance Criteria:\n{acceptance_criteria}".strip() + else: + body = f"{body}\n\n## Acceptance Criteria\n{acceptance_criteria}".strip() + + parent_id = payload.get("parent_id") + if parent_id: + parent_line = f"Parent: #{parent_id}" + body = f"{body}\n\n{parent_line}".strip() if body else parent_line + + labels = [issue_type] if issue_type else [] + priority = str(payload.get("priority") or "").strip() + if priority: + labels.append(f"priority:{priority.lower()}") + story_points = payload.get("story_points") + if story_points is not None: + labels.append(f"story-points:{story_points}") + url = f"{self.base_url}/repos/{owner}/{repo}/issues" + headers = { + "Authorization": f"token {self.api_token}", + "Accept": "application/vnd.github.v3+json", + } + response = self._request_with_retry( + lambda: requests.post( + url, + json={"title": title, "body": body, "labels": labels}, + headers=headers, + timeout=30, + ), + retry_on_ambiguous_transport=False, + ) + created = response.json() + issue_node_id = str(created.get("node_id") or "").strip() + if parent_id: + self._try_link_github_sub_issue(owner, repo, parent_id, issue_node_id) + + provider_fields = payload.get("provider_fields") + if isinstance(provider_fields, dict): + self._try_set_github_issue_type(issue_node_id, issue_type, provider_fields) + self._try_set_github_project_type_field(issue_node_id, issue_type, provider_fields) + + canonical_issue_number = str(created.get("number") or created.get("id") or "") + return { + "id": canonical_issue_number, + "key": canonical_issue_number, + "url": str(created.get("html_url") or created.get("url") or ""), + } + @beartype @require(lambda project_id: isinstance(project_id, str) and len(project_id) > 0, "project_id must be non-empty") 
@ensure(lambda result: isinstance(result, list), "Must return list") @@ -2770,6 +3015,13 @@ def _normalize(raw_value: str) -> str | None: mapped = _normalize(value) if mapped: return mapped + if isinstance(value, dict): + for candidate_key in ("name", "title"): + candidate_value = value.get(candidate_key) + if isinstance(candidate_value, str): + mapped = _normalize(candidate_value) + if mapped: + return mapped tags = issue_payload.get("tags") if isinstance(tags, list): @@ -2962,8 +3214,7 @@ def update_backlog_item(self, item: BacklogItem, update_fields: list[str] | None payload["state"] = item.state # Update issue - response = requests.patch(url, headers=headers, json=payload, timeout=30) - response.raise_for_status() + response = self._request_with_retry(lambda: requests.patch(url, headers=headers, json=payload, timeout=30)) updated_issue = response.json() # Convert back to BacklogItem diff --git a/src/specfact_cli/backlog/filters.py b/src/specfact_cli/backlog/filters.py index 0420f4ba..41411421 100644 --- a/src/specfact_cli/backlog/filters.py +++ b/src/specfact_cli/backlog/filters.py @@ -42,6 +42,8 @@ class BacklogFilters: """Filter by release identifier.""" limit: int | None = None """Maximum number of items to fetch (applied after filtering).""" + use_current_iteration_default: bool = True + """When sprint is omitted, whether provider may auto-resolve current iteration.""" @staticmethod def normalize_filter_value(value: str | None) -> str | None: diff --git a/src/specfact_cli/backlog/mappers/github_mapper.py b/src/specfact_cli/backlog/mappers/github_mapper.py index 7b09645d..b29ff360 100644 --- a/src/specfact_cli/backlog/mappers/github_mapper.py +++ b/src/specfact_cli/backlog/mappers/github_mapper.py @@ -43,7 +43,8 @@ def extract_fields(self, item_data: dict[str, Any]) -> dict[str, Any]: Dict mapping canonical field names to extracted values """ body = item_data.get("body", "") or "" - labels = item_data.get("labels", []) + labels_raw = item_data.get("labels", 
[]) + labels = labels_raw if isinstance(labels_raw, list) else [] label_names = [label.get("name", "") if isinstance(label, dict) else str(label) for label in labels if label] fields: dict[str, Any] = {} @@ -211,23 +212,44 @@ def _extract_numeric_field(self, body: str, field_name: str) -> int | None: Returns: Numeric value or None if not found """ - # Pattern 1: ## Field Name\n\n - section_pattern = rf"^##+\s+{re.escape(field_name)}\s*$\n\s*(\d+)" - match = re.search(section_pattern, body, re.MULTILINE) - if match: - try: - return int(match.group(1)) - except (ValueError, IndexError): - pass - - # Pattern 2: **Field Name:** - inline_pattern = rf"\*\*{re.escape(field_name)}:\*\*\s*(\d+)" - match = re.search(inline_pattern, body, re.IGNORECASE) - if match: - try: - return int(match.group(1)) - except (ValueError, IndexError): - pass + normalized_field = field_name.strip().lower() + if not normalized_field: + return None + + lines = body.splitlines() + + # Pattern 1: markdown section heading followed by a numeric line. + for idx, raw_line in enumerate(lines): + line = raw_line.strip() + if not line.startswith("##"): + continue + heading = line.lstrip("#").strip().lower() + if heading != normalized_field: + continue + for next_line in lines[idx + 1 :]: + candidate = next_line.strip() + if not candidate: + continue + match = re.match(r"^(\d+)", candidate) + if match: + try: + return int(match.group(1)) + except (ValueError, IndexError): + return None + break + + # Pattern 2: inline markdown label, e.g. 
**Field Name:** 8 + inline_prefix = f"**{normalized_field}:**" + for raw_line in lines: + line = raw_line.strip() + if line.lower().startswith(inline_prefix): + remainder = line[len(inline_prefix) :].strip() + match = re.match(r"^(\d+)", remainder) + if match: + try: + return int(match.group(1)) + except (ValueError, IndexError): + return None return None @@ -268,7 +290,12 @@ def _extract_work_item_type(self, label_names: list[str], item_data: dict[str, A # Check issue type metadata if available issue_type = item_data.get("issue_type") or item_data.get("type") - if issue_type: - return str(issue_type) + if isinstance(issue_type, str) and issue_type.strip(): + return issue_type.strip() + if isinstance(issue_type, dict): + for key in ("name", "title"): + candidate = issue_type.get(key) + if isinstance(candidate, str) and candidate.strip(): + return candidate.strip() return None diff --git a/src/specfact_cli/modules/analyze/module-package.yaml b/src/specfact_cli/modules/analyze/module-package.yaml index b9b23efb..7bd869eb 100644 --- a/src/specfact_cli/modules/analyze/module-package.yaml +++ b/src/specfact_cli/modules/analyze/module-package.yaml @@ -1,5 +1,5 @@ name: analyze -version: 0.35.0 +version: 0.36.0 commands: - analyze command_help: diff --git a/src/specfact_cli/modules/auth/module-package.yaml b/src/specfact_cli/modules/auth/module-package.yaml index 161447b1..495ed7ae 100644 --- a/src/specfact_cli/modules/auth/module-package.yaml +++ b/src/specfact_cli/modules/auth/module-package.yaml @@ -1,5 +1,5 @@ name: auth -version: 0.35.0 +version: 0.36.0 commands: - auth command_help: diff --git a/src/specfact_cli/modules/auth/src/commands.py b/src/specfact_cli/modules/auth/src/commands.py index 763894c3..9b8fa6f9 100644 --- a/src/specfact_cli/modules/auth/src/commands.py +++ b/src/specfact_cli/modules/auth/src/commands.py @@ -42,7 +42,7 @@ AZURE_DEVOPS_SCOPES = [AZURE_DEVOPS_RESOURCE] DEFAULT_GITHUB_BASE_URL = "https://github.com" DEFAULT_GITHUB_API_URL = 
"https://api.github.com" -DEFAULT_GITHUB_SCOPES = "repo" +DEFAULT_GITHUB_SCOPES = "repo read:project project" DEFAULT_GITHUB_CLIENT_ID = "Ov23lizkVHsbEIjZKvRD" @@ -589,7 +589,7 @@ def auth_github( scopes: str = typer.Option( DEFAULT_GITHUB_SCOPES, "--scopes", - help="OAuth scopes (comma or space separated)", + help="OAuth scopes (comma or space separated). Default: repo,read:project,project", hidden=True, ), ) -> None: diff --git a/src/specfact_cli/modules/backlog/module-package.yaml b/src/specfact_cli/modules/backlog/module-package.yaml index 4b35d540..229c2fc1 100644 --- a/src/specfact_cli/modules/backlog/module-package.yaml +++ b/src/specfact_cli/modules/backlog/module-package.yaml @@ -1,5 +1,5 @@ name: backlog -version: 0.35.0 +version: 0.36.0 commands: - backlog command_help: diff --git a/src/specfact_cli/modules/backlog/src/commands.py b/src/specfact_cli/modules/backlog/src/commands.py index d03aea1b..28494b46 100644 --- a/src/specfact_cli/modules/backlog/src/commands.py +++ b/src/specfact_cli/modules/backlog/src/commands.py @@ -69,6 +69,7 @@ class _BacklogCommandGroup(TyperGroup): # Compatibility / lower-frequency commands later. 
"refine": 100, "daily": 110, + "init-config": 118, "map-fields": 120, } @@ -476,6 +477,85 @@ def _load_backlog_config() -> dict[str, Any]: return config +@beartype +def _load_backlog_module_config_file() -> tuple[dict[str, Any], Path]: + """Load canonical backlog module config from `.specfact/backlog-config.yaml`.""" + config_dir = os.environ.get("SPECFACT_CONFIG_DIR") + search_paths: list[Path] = [] + if config_dir: + search_paths.append(Path(config_dir)) + search_paths.append(Path.cwd() / ".specfact") + + for base in search_paths: + path = base / "backlog-config.yaml" + if path.is_file(): + try: + data = yaml.safe_load(path.read_text(encoding="utf-8")) or {} + if isinstance(data, dict): + return data, path + except Exception as exc: + debug_log_operation("config_load", str(path), "error", error=repr(exc)) + return {}, path + + default_path = search_paths[-1] / "backlog-config.yaml" + return {}, default_path + + +@beartype +def _save_backlog_module_config_file(config: dict[str, Any], path: Path) -> None: + """Persist canonical backlog module config to `.specfact/backlog-config.yaml`.""" + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(yaml.dump(config, sort_keys=False), encoding="utf-8") + + +@beartype +def _upsert_backlog_provider_settings( + provider: str, + settings_update: dict[str, Any], + *, + project_id: str | None = None, + adapter: str | None = None, +) -> Path: + """Merge provider settings into `.specfact/backlog-config.yaml` and save.""" + cfg, path = _load_backlog_module_config_file() + backlog_config = cfg.get("backlog_config") + if not isinstance(backlog_config, dict): + backlog_config = {} + providers = backlog_config.get("providers") + if not isinstance(providers, dict): + providers = {} + + provider_cfg = providers.get(provider) + if not isinstance(provider_cfg, dict): + provider_cfg = {} + + if adapter: + provider_cfg["adapter"] = adapter + if project_id: + provider_cfg["project_id"] = project_id + + settings = 
provider_cfg.get("settings") + if not isinstance(settings, dict): + settings = {} + + def _deep_merge(dst: dict[str, Any], src: dict[str, Any]) -> dict[str, Any]: + for key, value in src.items(): + if isinstance(value, dict) and isinstance(dst.get(key), dict): + _deep_merge(dst[key], value) + else: + dst[key] = value + return dst + + _deep_merge(settings, settings_update) + provider_cfg["settings"] = settings + providers[provider] = provider_cfg + backlog_config["providers"] = providers + cfg["backlog_config"] = backlog_config + + _save_backlog_module_config_file(cfg, path) + return path + + @beartype def _resolve_standup_options( cli_state: str | None, @@ -3767,23 +3847,75 @@ def _on_write_comment_progress(index: int, total: int, item: BacklogItem) -> Non raise typer.Exit(1) from e +@app.command("init-config") +@beartype +def init_config( + force: bool = typer.Option(False, "--force", help="Overwrite existing .specfact/backlog-config.yaml"), +) -> None: + """Scaffold `.specfact/backlog-config.yaml` with default backlog provider config structure.""" + cfg, path = _load_backlog_module_config_file() + if path.exists() and not force: + console.print(f"[yellow]⚠[/yellow] Config already exists: {path}") + console.print("[dim]Use --force to overwrite or run `specfact backlog map-fields` to update mappings.[/dim]") + return + + default_config: dict[str, Any] = { + "backlog_config": { + "providers": { + "github": { + "adapter": "github", + "project_id": "", + "settings": { + "github_issue_types": { + "type_ids": {}, + } + }, + }, + "ado": { + "adapter": "ado", + "project_id": "", + "settings": { + "field_mapping_file": ".specfact/templates/backlog/field_mappings/ado_custom.yaml", + }, + }, + } + } + } + + if cfg and not force: + # unreachable due earlier return, keep for safety + default_config = cfg + + _save_backlog_module_config_file(default_config if force or not cfg else cfg, path) + console.print(f"[green]✓[/green] Backlog config initialized: {path}") + 
console.print("[dim]Next: run `specfact backlog map-fields` to configure provider mappings.[/dim]") + + @app.command("map-fields") -@require( - lambda ado_org, ado_project: ( - isinstance(ado_org, str) and len(ado_org) > 0 and isinstance(ado_project, str) and len(ado_project) > 0 - ), - "ADO org and project must be non-empty strings", -) @beartype def map_fields( - ado_org: str = typer.Option(..., "--ado-org", help="Azure DevOps organization (required)"), - ado_project: str = typer.Option(..., "--ado-project", help="Azure DevOps project (required)"), + ado_org: str | None = typer.Option(None, "--ado-org", help="Azure DevOps organization"), + ado_project: str | None = typer.Option(None, "--ado-project", help="Azure DevOps project"), ado_token: str | None = typer.Option( None, "--ado-token", help="Azure DevOps PAT (optional, uses AZURE_DEVOPS_TOKEN env var if not provided)" ), ado_base_url: str | None = typer.Option( None, "--ado-base-url", help="Azure DevOps base URL (defaults to https://dev.azure.com)" ), + provider: list[str] = typer.Option( + [], "--provider", help="Provider(s) to configure: ado, github (repeatable)", show_default=False + ), + github_project_id: str | None = typer.Option(None, "--github-project-id", help="GitHub owner/repo context"), + github_project_v2_id: str | None = typer.Option(None, "--github-project-v2-id", help="GitHub ProjectV2 node ID"), + github_type_field_id: str | None = typer.Option( + None, "--github-type-field-id", help="GitHub ProjectV2 Type field ID" + ), + github_type_option: list[str] = typer.Option( + [], + "--github-type-option", + help="Type mapping entry '=' (repeatable, e.g. 
--github-type-option task=OPT123)", + show_default=False, + ), reset: bool = typer.Option( False, "--reset", help="Reset custom field mapping to defaults (deletes ado_custom.yaml)" ), @@ -3808,6 +3940,547 @@ def map_fields( from specfact_cli.backlog.mappers.template_config import FieldMappingConfig from specfact_cli.utils.auth_tokens import get_token + def _normalize_provider_selection(raw: Any) -> list[str]: + alias_map = { + "ado": "ado", + "azure devops": "ado", + "azure dev ops": "ado", + "azure dev-ops": "ado", + "azure_devops": "ado", + "azure_dev-ops": "ado", + "github": "github", + } + + def _normalize_item(item: Any) -> str | None: + candidate: Any = item + if isinstance(item, dict) and "value" in item: + candidate = item.get("value") + elif hasattr(item, "value"): + candidate = item.value + + text_item = str(candidate or "").strip().lower() + if not text_item: + return None + if text_item in {"done", "finish", "finished"}: + return None + + cleaned = text_item.replace("(", " ").replace(")", " ").replace("-", " ").replace("_", " ") + cleaned = " ".join(cleaned.split()) + + mapped = alias_map.get(text_item) or alias_map.get(cleaned) + if mapped: + return mapped + + # Last-resort parser for stringified choice objects containing value='ado' / value='github'. 
+ if "value='ado'" in text_item or 'value="ado"' in text_item: + return "ado" + if "value='github'" in text_item or 'value="github"' in text_item: + return "github" + + return None + + normalized: list[str] = [] + if isinstance(raw, list): + for item in raw: + mapped = _normalize_item(item) + if mapped and mapped not in normalized: + normalized.append(mapped) + return normalized + + if isinstance(raw, str): + for part in raw.replace(";", ",").split(","): + mapped = _normalize_item(part) + if mapped and mapped not in normalized: + normalized.append(mapped) + return normalized + + mapped = _normalize_item(raw) + return [mapped] if mapped else [] + + selected_providers = _normalize_provider_selection(provider) + if not selected_providers: + # Preserve historical behavior for existing explicit provider options. + if ado_org or ado_project or ado_token: + selected_providers = ["ado"] + elif github_project_id or github_project_v2_id or github_type_field_id or github_type_option: + selected_providers = ["github"] + else: + try: + import questionary # type: ignore[reportMissingImports] + + picked = questionary.checkbox( + "Select providers to configure", + choices=[ + questionary.Choice(title="Azure DevOps", value="ado"), + questionary.Choice(title="GitHub", value="github"), + ], + ).ask() + selected_providers = _normalize_provider_selection(picked) + if not selected_providers: + console.print("[yellow]⚠[/yellow] No providers selected. 
Aborting.") + raise typer.Exit(1) + except typer.Exit: + raise + except Exception: + selected_raw = typer.prompt("Providers to configure (comma-separated: ado,github)", default="") + selected_providers = _normalize_provider_selection(selected_raw) + + if not selected_providers: + console.print("[red]Error:[/red] Please select at least one provider (ado or github).") + raise typer.Exit(1) + + if any(item not in {"ado", "github"} for item in selected_providers): + console.print("[red]Error:[/red] --provider supports only: ado, github") + raise typer.Exit(1) + + def _persist_github_custom_mapping_file(repo_issue_types: dict[str, str]) -> Path: + """Create or update github_custom.yaml with inferred type/hierarchy mappings.""" + mapping_file = Path.cwd() / ".specfact" / "templates" / "backlog" / "field_mappings" / "github_custom.yaml" + mapping_file.parent.mkdir(parents=True, exist_ok=True) + + default_payload: dict[str, Any] = { + "type_mapping": { + "epic": "epic", + "feature": "feature", + "story": "story", + "task": "task", + "bug": "bug", + "spike": "spike", + }, + "creation_hierarchy": { + "epic": [], + "feature": ["epic"], + "story": ["feature", "epic"], + "task": ["story", "feature"], + "bug": ["story", "feature", "epic"], + "spike": ["feature", "epic"], + "custom": ["epic", "feature", "story"], + }, + "dependency_rules": { + "blocks": "blocks", + "blocked_by": "blocks", + "relates": "relates_to", + }, + "status_mapping": { + "open": "todo", + "closed": "done", + "todo": "todo", + "in progress": "in_progress", + "done": "done", + }, + } + + existing_payload: dict[str, Any] = {} + if mapping_file.exists(): + try: + loaded = yaml.safe_load(mapping_file.read_text(encoding="utf-8")) or {} + if isinstance(loaded, dict): + existing_payload = loaded + except Exception: + existing_payload = {} + + def _deep_merge(dst: dict[str, Any], src: dict[str, Any]) -> dict[str, Any]: + for key, value in src.items(): + if isinstance(value, dict) and isinstance(dst.get(key), dict): 
+ _deep_merge(dst[key], value) + else: + dst[key] = value + return dst + + final_payload = _deep_merge(dict(default_payload), existing_payload) + + alias_to_canonical = { + "epic": "epic", + "feature": "feature", + "story": "story", + "user story": "story", + "task": "task", + "bug": "bug", + "spike": "spike", + "initiative": "epic", + "requirement": "feature", + } + discovered_map: dict[str, str] = {} + existing_type_mapping = final_payload.get("type_mapping") + if isinstance(existing_type_mapping, dict): + for key, value in existing_type_mapping.items(): + discovered_map[str(key)] = str(value) + for raw_type_name in repo_issue_types: + normalized = str(raw_type_name).strip().lower().replace("_", " ").replace("-", " ") + canonical = alias_to_canonical.get(normalized, "custom") + discovered_map.setdefault(normalized, canonical) + final_payload["type_mapping"] = discovered_map + + mapping_file.write_text(yaml.dump(final_payload, sort_keys=False), encoding="utf-8") + return mapping_file + + def _run_github_mapping_setup() -> None: + token = os.environ.get("GITHUB_TOKEN") + if not token: + stored = get_token("github", allow_expired=False) + token = stored.get("access_token") if isinstance(stored, dict) else None + if not token: + console.print("[red]Error:[/red] GitHub token required for github mapping setup") + console.print("[yellow]Use:[/yellow] specfact auth github or set GITHUB_TOKEN") + raise typer.Exit(1) + + def _github_graphql(query: str, variables: dict[str, Any]) -> dict[str, Any]: + response = requests.post( + "https://api.github.com/graphql", + headers={ + "Authorization": f"Bearer {token}", + "Accept": "application/vnd.github+json", + }, + json={"query": query, "variables": variables}, + timeout=30, + ) + response.raise_for_status() + payload = response.json() + if not isinstance(payload, dict): + raise ValueError("Unexpected GitHub GraphQL response payload") + errors = payload.get("errors") + if isinstance(errors, list) and errors: + messages = 
[str(err.get("message")) for err in errors if isinstance(err, dict) and err.get("message")] + combined = "; ".join(messages) + lower_combined = combined.lower() + if "required scopes" in lower_combined and "read:project" in lower_combined: + raise ValueError( + "GitHub token is missing Projects scopes. Re-authenticate with: " + "specfact auth github --scopes repo,read:project,project" + ) + raise ValueError(combined or "GitHub GraphQL returned errors") + data = payload.get("data") + return data if isinstance(data, dict) else {} + + project_context = (github_project_id or "").strip() or typer.prompt( + "GitHub project context (owner/repo)", default="" + ).strip() + if "/" not in project_context: + console.print("[red]Error:[/red] GitHub project context must be in owner/repo format") + raise typer.Exit(1) + owner, repo_name = project_context.split("/", 1) + owner = owner.strip() + repo_name = repo_name.strip() + console.print( + f"[dim]Hint:[/dim] Open https://github.com/{owner}/{repo_name}/projects and use the project number shown there, " + "or paste a ProjectV2 node ID (PVT_xxx)." 
+ ) + + project_ref = (github_project_v2_id or "").strip() or typer.prompt( + "GitHub ProjectV2 (number like 1, or node ID like PVT_xxx)", default="" + ).strip() + + issue_types_query = ( + "query($owner:String!, $repo:String!){ " + "repository(owner:$owner, name:$repo){ issueTypes(first:50){ nodes{ id name } } } " + "}" + ) + repo_issue_types: dict[str, str] = {} + try: + issue_types_data = _github_graphql(issue_types_query, {"owner": owner, "repo": repo_name}) + repository = ( + issue_types_data.get("repository") if isinstance(issue_types_data.get("repository"), dict) else None + ) + issue_types = repository.get("issueTypes") if isinstance(repository, dict) else None + nodes = issue_types.get("nodes") if isinstance(issue_types, dict) else None + if isinstance(nodes, list): + for node in nodes: + if not isinstance(node, dict): + continue + type_name = str(node.get("name") or "").strip().lower() + type_id = str(node.get("id") or "").strip() + if type_name and type_id: + repo_issue_types[type_name] = type_id + except (requests.RequestException, ValueError): + # Keep flow resilient; ProjectV2 mapping can still be configured without repository issue type ids. + repo_issue_types = {} + + if repo_issue_types: + discovered = ", ".join(sorted(repo_issue_types.keys())) + console.print(f"[cyan]Discovered repository issue types:[/cyan] {discovered}") + + cli_option_map: dict[str, str] = {} + for entry in github_type_option: + raw = entry.strip() + if "=" not in raw: + console.print(f"[yellow]⚠[/yellow] Skipping invalid --github-type-option '{raw}'") + continue + key, value = raw.split("=", 1) + key = key.strip().lower() + value = value.strip() + if key and value: + cli_option_map[key] = value + + # Fast-path for fully specified non-interactive invocations. 
+ if project_ref and (github_type_field_id or "").strip() and cli_option_map: + github_custom_mapping_file = _persist_github_custom_mapping_file(repo_issue_types) + config_path = _upsert_backlog_provider_settings( + "github", + { + "field_mapping_file": ".specfact/templates/backlog/field_mappings/github_custom.yaml", + "provider_fields": { + "github_project_v2": { + "project_id": project_ref, + "type_field_id": str(github_type_field_id).strip(), + "type_option_ids": cli_option_map, + } + }, + "github_issue_types": {"type_ids": repo_issue_types}, + }, + project_id=project_context, + adapter="github", + ) + console.print(f"[green]✓[/green] GitHub ProjectV2 Type mapping saved to {config_path}") + console.print(f"[green]Custom mapping:[/green] {github_custom_mapping_file}") + return + + project_id = "" + project_title = "" + fields_nodes: list[dict[str, Any]] = [] + + def _extract_project(node: dict[str, Any] | None) -> tuple[str, str, list[dict[str, Any]]]: + if not isinstance(node, dict): + return "", "", [] + pid = str(node.get("id") or "").strip() + title = str(node.get("title") or "").strip() + fields = node.get("fields") + nodes = fields.get("nodes") if isinstance(fields, dict) else None + valid_nodes = [item for item in nodes if isinstance(item, dict)] if isinstance(nodes, list) else [] + return pid, title, valid_nodes + + try: + if project_ref.isdigit(): + org_query = ( + "query($login:String!, $number:Int!) { " + "organization(login:$login) { projectV2(number:$number) { id title fields(first:100) { nodes { " + "__typename ... on ProjectV2Field { id name } " + "... on ProjectV2SingleSelectField { id name options { id name } } " + "... on ProjectV2IterationField { id name } " + "} } } } " + "}" + ) + user_query = ( + "query($login:String!, $number:Int!) { " + "user(login:$login) { projectV2(number:$number) { id title fields(first:100) { nodes { " + "__typename ... on ProjectV2Field { id name } " + "... 
on ProjectV2SingleSelectField { id name options { id name } } " + "... on ProjectV2IterationField { id name } " + "} } } } " + "}" + ) + + number = int(project_ref) + org_error: str | None = None + user_error: str | None = None + + try: + org_data = _github_graphql(org_query, {"login": owner, "number": number}) + org_node = org_data.get("organization") if isinstance(org_data.get("organization"), dict) else None + project_node = org_node.get("projectV2") if isinstance(org_node, dict) else None + project_id, project_title, fields_nodes = _extract_project( + project_node if isinstance(project_node, dict) else None + ) + except ValueError as error: + org_error = str(error) + + if not project_id: + try: + user_data = _github_graphql(user_query, {"login": owner, "number": number}) + user_node = user_data.get("user") if isinstance(user_data.get("user"), dict) else None + project_node = user_node.get("projectV2") if isinstance(user_node, dict) else None + project_id, project_title, fields_nodes = _extract_project( + project_node if isinstance(project_node, dict) else None + ) + except ValueError as error: + user_error = str(error) + + if not project_id and (org_error or user_error): + detail = "; ".join(part for part in [org_error, user_error] if part) + raise ValueError(detail) + else: + project_id = project_ref + query = ( + "query($projectId:ID!) { " + "node(id:$projectId) { " + "... on ProjectV2 { id title fields(first:100) { nodes { " + "__typename ... on ProjectV2Field { id name } " + "... on ProjectV2SingleSelectField { id name options { id name } } " + "... 
on ProjectV2IterationField { id name } " + "} } } " + "} " + "}" + ) + data = _github_graphql(query, {"projectId": project_id}) + node = data.get("node") if isinstance(data.get("node"), dict) else None + project_id, project_title, fields_nodes = _extract_project(node) + except (requests.RequestException, ValueError) as error: + message = str(error) + console.print(f"[red]Error:[/red] Could not discover GitHub ProjectV2 metadata: {message}") + if "required scopes" in message.lower() or "read:project" in message.lower(): + console.print( + "[yellow]Hint:[/yellow] Run `specfact auth github --scopes repo,read:project,project` " + "or provide `GITHUB_TOKEN` with those scopes." + ) + else: + console.print( + f"[yellow]Hint:[/yellow] Verify the project exists under " + f"https://github.com/{owner}/{repo_name}/projects and that the number/ID is correct." + ) + raise typer.Exit(1) from error + + if not project_id: + console.print( + "[red]Error:[/red] Could not resolve GitHub ProjectV2. Check owner/repo and project number or ID." + ) + raise typer.Exit(1) + + type_field_id = (github_type_field_id or "").strip() + selected_type_field: dict[str, Any] | None = None + single_select_fields = [ + field + for field in fields_nodes + if isinstance(field.get("options"), list) and str(field.get("id") or "").strip() + ] + + expected_type_names = {"epic", "feature", "story", "task", "bug"} + + def _field_options(field: dict[str, Any]) -> set[str]: + raw = field.get("options") + if not isinstance(raw, list): + return set() + return { + str(opt.get("name") or "").strip().lower() + for opt in raw + if isinstance(opt, dict) and str(opt.get("name") or "").strip() + } + + if type_field_id: + selected_type_field = next( + (field for field in single_select_fields if str(field.get("id") or "").strip() == type_field_id), + None, + ) + else: + # Prefer explicit Type-like field names first. 
+ selected_type_field = next( + ( + field + for field in single_select_fields + if str(field.get("name") or "").strip().lower() + in {"type", "issue type", "item type", "work item type"} + ), + None, + ) + # Otherwise pick a field whose options look like backlog item types (epic/feature/story/task/bug). + if selected_type_field is None: + selected_type_field = next( + ( + field + for field in single_select_fields + if len(_field_options(field).intersection(expected_type_names)) >= 2 + ), + None, + ) + + if selected_type_field is None and single_select_fields: + console.print("[cyan]Discovered project single-select fields:[/cyan]") + for field in single_select_fields: + field_name = str(field.get("name") or "") + options_preview = sorted(_field_options(field)) + preview = ", ".join(options_preview[:8]) + suffix = "..." if len(options_preview) > 8 else "" + console.print(f" - {field_name} (id={field.get('id')}) | options: {preview}{suffix}") + # Simplified flow: do not force manual field picking here. + # Repository issue types are source-of-truth; ProjectV2 mapping is optional enrichment. + + if selected_type_field is None: + console.print( + "[yellow]⚠[/yellow] No ProjectV2 Type-like single-select field found. " + "Skipping ProjectV2 type-option mapping for now." 
+ ) + + type_field_id = ( + str(selected_type_field.get("id") or "").strip() if isinstance(selected_type_field, dict) else "" + ) + options_raw = selected_type_field.get("options") if isinstance(selected_type_field, dict) else None + options = [item for item in options_raw if isinstance(item, dict)] if isinstance(options_raw, list) else [] + + option_map: dict[str, str] = dict(cli_option_map) + + option_name_to_id = { + str(opt.get("name") or "").strip().lower(): str(opt.get("id") or "").strip() + for opt in options + if str(opt.get("name") or "").strip() and str(opt.get("id") or "").strip() + } + + if not option_map and option_name_to_id: + for issue_type in ["epic", "feature", "story", "task", "bug"]: + if issue_type in option_name_to_id: + option_map[issue_type] = option_name_to_id[issue_type] + + if not option_map and option_name_to_id: + available_names = ", ".join(sorted(option_name_to_id.keys())) + console.print(f"[cyan]Available Type options:[/cyan] {available_names}") + for issue_type in ["epic", "feature", "story", "task", "bug"]: + option_name = ( + typer.prompt( + f"Type option name for '{issue_type}' (optional)", + default=issue_type if issue_type in option_name_to_id else "", + ) + .strip() + .lower() + ) + if option_name and option_name in option_name_to_id: + option_map[issue_type] = option_name_to_id[option_name] + + issue_type_id_map = { + issue_type: repo_issue_types.get(issue_type, "") + for issue_type in ["epic", "feature", "story", "task", "bug"] + if repo_issue_types.get(issue_type) + } + + settings_update: dict[str, Any] = {} + if issue_type_id_map: + settings_update["github_issue_types"] = {"type_ids": issue_type_id_map} + + if type_field_id and option_map: + settings_update["provider_fields"] = { + "github_project_v2": { + "project_id": project_id, + "type_field_id": type_field_id, + "type_option_ids": option_map, + } + } + elif type_field_id and not option_map: + console.print( + "[yellow]⚠[/yellow] ProjectV2 Type field found, but no 
matching type options were configured. " + "Repository issue-type ids were still saved." + ) + + if not settings_update: + console.print( + "[red]Error:[/red] Could not resolve GitHub type mappings from repository issue types or ProjectV2 options." + ) + raise typer.Exit(1) + + github_custom_mapping_file = _persist_github_custom_mapping_file(repo_issue_types) + settings_update["field_mapping_file"] = ".specfact/templates/backlog/field_mappings/github_custom.yaml" + + config_path = _upsert_backlog_provider_settings( + "github", + settings_update, + project_id=project_context, + adapter="github", + ) + + project_label = project_title or project_id + console.print(f"[green]✓[/green] GitHub mapping saved to {config_path}") + console.print(f"[green]Custom mapping:[/green] {github_custom_mapping_file}") + if type_field_id: + field_name = str(selected_type_field.get("name") or "") if isinstance(selected_type_field, dict) else "" + console.print(f"[dim]Project: {project_label} | Type field: {field_name}[/dim]") + else: + console.print("[dim]ProjectV2 Type field mapping skipped; repository issue types were captured.[/dim]") + def _find_potential_match(canonical_field: str, available_fields: list[dict[str, Any]]) -> str | None: """ Find a potential ADO field match for a canonical field using regex/fuzzy matching. @@ -3869,6 +4542,10 @@ def _find_potential_match(canonical_field: str, available_fields: list[dict[str, return None + if "ado" not in selected_providers and "github" in selected_providers: + _run_github_mapping_setup() + return + # Resolve token (explicit > env var > stored token) api_token: str | None = None auth_scheme = "basic" @@ -3900,6 +4577,14 @@ def _find_potential_match(canonical_field: str, available_fields: list[dict[str, console.print(" 3. 
Use: specfact auth azure-devops") raise typer.Exit(1) + if not ado_org: + ado_org = typer.prompt("Azure DevOps organization", default="").strip() or None + if not ado_project: + ado_project = typer.prompt("Azure DevOps project", default="").strip() or None + if not ado_org or not ado_project: + console.print("[red]Error:[/red] Azure DevOps organization and project are required when configuring ado") + raise typer.Exit(1) + # Build base URL base_url = (ado_base_url or "https://dev.azure.com").rstrip("/") @@ -4172,5 +4857,20 @@ def _find_potential_match(canonical_field: str, available_fields: list[dict[str, console.print() console.print(Panel("[bold green]✓ Mapping saved successfully[/bold green]", border_style="green")) console.print(f"[green]Location:[/green] {custom_mapping_file}") + + provider_cfg_path = _upsert_backlog_provider_settings( + "ado", + { + "field_mapping_file": ".specfact/templates/backlog/field_mappings/ado_custom.yaml", + "ado_org": ado_org, + "ado_project": ado_project, + }, + project_id=f"{ado_org}/{ado_project}" if ado_org and ado_project else None, + adapter="ado", + ) + console.print(f"[green]Provider config:[/green] {provider_cfg_path}") console.print() console.print("[dim]You can now use this mapping with specfact backlog refine.[/dim]") + + if "github" in selected_providers: + _run_github_mapping_setup() diff --git a/src/specfact_cli/modules/contract/module-package.yaml b/src/specfact_cli/modules/contract/module-package.yaml index e23f2e5a..a40f8c52 100644 --- a/src/specfact_cli/modules/contract/module-package.yaml +++ b/src/specfact_cli/modules/contract/module-package.yaml @@ -1,5 +1,5 @@ name: contract -version: 0.35.0 +version: 0.36.0 commands: - contract command_help: diff --git a/src/specfact_cli/modules/drift/module-package.yaml b/src/specfact_cli/modules/drift/module-package.yaml index 6ec43c49..9c295903 100644 --- a/src/specfact_cli/modules/drift/module-package.yaml +++ b/src/specfact_cli/modules/drift/module-package.yaml @@ -1,5 
+1,5 @@ name: drift -version: 0.35.0 +version: 0.36.0 commands: - drift command_help: diff --git a/src/specfact_cli/modules/enforce/module-package.yaml b/src/specfact_cli/modules/enforce/module-package.yaml index 341f58bf..7ace3b1d 100644 --- a/src/specfact_cli/modules/enforce/module-package.yaml +++ b/src/specfact_cli/modules/enforce/module-package.yaml @@ -1,5 +1,5 @@ name: enforce -version: 0.35.0 +version: 0.36.0 commands: - enforce command_help: diff --git a/src/specfact_cli/modules/generate/module-package.yaml b/src/specfact_cli/modules/generate/module-package.yaml index 2ccbab5e..749d0a87 100644 --- a/src/specfact_cli/modules/generate/module-package.yaml +++ b/src/specfact_cli/modules/generate/module-package.yaml @@ -1,5 +1,5 @@ name: generate -version: 0.35.0 +version: 0.36.0 commands: - generate command_help: diff --git a/src/specfact_cli/modules/import_cmd/module-package.yaml b/src/specfact_cli/modules/import_cmd/module-package.yaml index 7a383556..5e44c0f2 100644 --- a/src/specfact_cli/modules/import_cmd/module-package.yaml +++ b/src/specfact_cli/modules/import_cmd/module-package.yaml @@ -1,5 +1,5 @@ name: import_cmd -version: 0.35.0 +version: 0.36.0 commands: - import command_help: diff --git a/src/specfact_cli/modules/init/module-package.yaml b/src/specfact_cli/modules/init/module-package.yaml index b7174644..6d80b88d 100644 --- a/src/specfact_cli/modules/init/module-package.yaml +++ b/src/specfact_cli/modules/init/module-package.yaml @@ -1,5 +1,5 @@ name: init -version: 0.35.0 +version: 0.36.0 commands: - init command_help: diff --git a/src/specfact_cli/modules/migrate/module-package.yaml b/src/specfact_cli/modules/migrate/module-package.yaml index bebac551..fb4f5425 100644 --- a/src/specfact_cli/modules/migrate/module-package.yaml +++ b/src/specfact_cli/modules/migrate/module-package.yaml @@ -1,5 +1,5 @@ name: migrate -version: 0.35.0 +version: 0.36.0 commands: - migrate command_help: diff --git 
a/src/specfact_cli/modules/module_registry/module-package.yaml b/src/specfact_cli/modules/module_registry/module-package.yaml index 96f2c0a6..bf08f24d 100644 --- a/src/specfact_cli/modules/module_registry/module-package.yaml +++ b/src/specfact_cli/modules/module_registry/module-package.yaml @@ -1,5 +1,5 @@ name: module-registry -version: 0.35.0 +version: 0.36.0 commands: - module command_help: diff --git a/src/specfact_cli/modules/patch_mode/module-package.yaml b/src/specfact_cli/modules/patch_mode/module-package.yaml index d044a3ec..6357e43c 100644 --- a/src/specfact_cli/modules/patch_mode/module-package.yaml +++ b/src/specfact_cli/modules/patch_mode/module-package.yaml @@ -1,5 +1,5 @@ name: patch-mode -version: 0.35.0 +version: 0.36.0 commands: - patch command_help: diff --git a/src/specfact_cli/modules/plan/module-package.yaml b/src/specfact_cli/modules/plan/module-package.yaml index c75def19..1a4989f4 100644 --- a/src/specfact_cli/modules/plan/module-package.yaml +++ b/src/specfact_cli/modules/plan/module-package.yaml @@ -1,5 +1,5 @@ name: plan -version: 0.35.0 +version: 0.36.0 commands: - plan command_help: diff --git a/src/specfact_cli/modules/policy_engine/module-package.yaml b/src/specfact_cli/modules/policy_engine/module-package.yaml index 8f933789..8b9de5c0 100644 --- a/src/specfact_cli/modules/policy_engine/module-package.yaml +++ b/src/specfact_cli/modules/policy_engine/module-package.yaml @@ -1,5 +1,5 @@ name: policy-engine -version: 0.35.0 +version: 0.36.0 commands: - policy command_help: diff --git a/src/specfact_cli/modules/project/module-package.yaml b/src/specfact_cli/modules/project/module-package.yaml index 7d21c66c..d1f6abea 100644 --- a/src/specfact_cli/modules/project/module-package.yaml +++ b/src/specfact_cli/modules/project/module-package.yaml @@ -1,5 +1,5 @@ name: project -version: 0.35.0 +version: 0.36.0 commands: - project command_help: diff --git a/src/specfact_cli/modules/repro/module-package.yaml 
b/src/specfact_cli/modules/repro/module-package.yaml index 21d18be5..00ad5965 100644 --- a/src/specfact_cli/modules/repro/module-package.yaml +++ b/src/specfact_cli/modules/repro/module-package.yaml @@ -1,5 +1,5 @@ name: repro -version: 0.35.0 +version: 0.36.0 commands: - repro command_help: diff --git a/src/specfact_cli/modules/sdd/module-package.yaml b/src/specfact_cli/modules/sdd/module-package.yaml index f4b27b73..1b0ef7bd 100644 --- a/src/specfact_cli/modules/sdd/module-package.yaml +++ b/src/specfact_cli/modules/sdd/module-package.yaml @@ -1,5 +1,5 @@ name: sdd -version: 0.35.0 +version: 0.36.0 commands: - sdd command_help: diff --git a/src/specfact_cli/modules/spec/module-package.yaml b/src/specfact_cli/modules/spec/module-package.yaml index 2e7b0a30..86e6393d 100644 --- a/src/specfact_cli/modules/spec/module-package.yaml +++ b/src/specfact_cli/modules/spec/module-package.yaml @@ -1,5 +1,5 @@ name: spec -version: 0.35.0 +version: 0.36.0 commands: - spec command_help: diff --git a/src/specfact_cli/modules/sync/module-package.yaml b/src/specfact_cli/modules/sync/module-package.yaml index 7d730aa7..ae0c4ea5 100644 --- a/src/specfact_cli/modules/sync/module-package.yaml +++ b/src/specfact_cli/modules/sync/module-package.yaml @@ -1,5 +1,5 @@ name: sync -version: 0.35.0 +version: 0.36.0 commands: - sync command_help: diff --git a/src/specfact_cli/modules/upgrade/module-package.yaml b/src/specfact_cli/modules/upgrade/module-package.yaml index 0b695869..263e8f8c 100644 --- a/src/specfact_cli/modules/upgrade/module-package.yaml +++ b/src/specfact_cli/modules/upgrade/module-package.yaml @@ -1,5 +1,5 @@ name: upgrade -version: 0.35.0 +version: 0.36.0 commands: - upgrade command_help: diff --git a/src/specfact_cli/modules/validate/module-package.yaml b/src/specfact_cli/modules/validate/module-package.yaml index 8bcd85e4..add1002c 100644 --- a/src/specfact_cli/modules/validate/module-package.yaml +++ b/src/specfact_cli/modules/validate/module-package.yaml @@ -1,5 +1,5 @@ 
name: validate -version: 0.35.0 +version: 0.36.0 commands: - validate command_help: diff --git a/src/specfact_cli/registry/module_packages.py b/src/specfact_cli/registry/module_packages.py index f151c182..6fdd87c9 100644 --- a/src/specfact_cli/registry/module_packages.py +++ b/src/specfact_cli/registry/module_packages.py @@ -905,7 +905,8 @@ def register_module_package_commands( cmd_name, ) CommandRegistry._typer_cache.pop(cmd_name, None) - logger.debug("Module %s extended command group '%s'.", meta.name, cmd_name) + if is_debug_mode(): + logger.debug("Module %s extended command group '%s'.", meta.name, cmd_name) continue help_str = (meta.command_help or {}).get(cmd_name) or f"Module package: {meta.name}" loader = _make_package_loader(package_dir, meta.name, cmd_name) diff --git a/tests/integration/backlog/test_additional_commands_e2e.py b/tests/integration/backlog/test_additional_commands_e2e.py index 016049be..a10f366c 100644 --- a/tests/integration/backlog/test_additional_commands_e2e.py +++ b/tests/integration/backlog/test_additional_commands_e2e.py @@ -33,6 +33,10 @@ def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: _ = project_id return [{"source_id": "1", "target_id": "2", "type": "blocks"}] + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + _ = project_id, payload + return {"id": "3", "key": "TASK-3", "url": "https://example.test/issues/3"} + def _write_baseline(path: Path) -> None: path.parent.mkdir(parents=True, exist_ok=True) diff --git a/tests/integration/backlog/test_ado_e2e.py b/tests/integration/backlog/test_ado_e2e.py index fef97696..af8e67e8 100644 --- a/tests/integration/backlog/test_ado_e2e.py +++ b/tests/integration/backlog/test_ado_e2e.py @@ -38,6 +38,10 @@ def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: _ = project_id return [{"source_id": "100", "target_id": "101", "type": "blocks"}] + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, 
Any]: + _ = project_id, payload + return {"id": "102", "key": "ADO-102", "url": "https://example.test/workitems/102"} + def test_backlog_trace_impact_ado_flow(monkeypatch) -> None: runner = CliRunner() diff --git a/tests/integration/backlog/test_delta_e2e.py b/tests/integration/backlog/test_delta_e2e.py index 21d1cbe5..149b3b43 100644 --- a/tests/integration/backlog/test_delta_e2e.py +++ b/tests/integration/backlog/test_delta_e2e.py @@ -33,6 +33,10 @@ def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: _ = project_id return [{"source_id": "1", "target_id": "2", "type": "blocks"}] + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + _ = project_id, payload + return {"id": "3", "key": "#3", "url": "https://example.test/issues/3"} + def _write_baseline(path: Path) -> None: path.parent.mkdir(parents=True, exist_ok=True) diff --git a/tests/integration/backlog/test_github_e2e.py b/tests/integration/backlog/test_github_e2e.py index 97c16860..d7baa121 100644 --- a/tests/integration/backlog/test_github_e2e.py +++ b/tests/integration/backlog/test_github_e2e.py @@ -32,6 +32,10 @@ def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: _ = project_id return [{"source_id": "1", "target_id": "2", "type": "blocks"}] + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + _ = project_id, payload + return {"id": "3", "key": "#3", "url": "https://example.test/issues/3"} + def test_backlog_analyze_deps_github_flow(tmp_path: Path, monkeypatch) -> None: runner = CliRunner() diff --git a/tests/integration/backlog/test_sync_e2e.py b/tests/integration/backlog/test_sync_e2e.py index 6542f57e..db4183ce 100644 --- a/tests/integration/backlog/test_sync_e2e.py +++ b/tests/integration/backlog/test_sync_e2e.py @@ -33,6 +33,10 @@ def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: _ = project_id return [{"source_id": "1", "target_id": "2", "type": "blocks"}] + def 
create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + _ = project_id, payload + return {"id": "3", "key": "#3", "url": "https://example.test/issues/3"} + def test_backlog_sync_generates_plan_and_updates_baseline(tmp_path: Path, monkeypatch) -> None: runner = CliRunner() diff --git a/tests/integration/backlog/test_verify_readiness_e2e.py b/tests/integration/backlog/test_verify_readiness_e2e.py index de31f30b..f2373ad7 100644 --- a/tests/integration/backlog/test_verify_readiness_e2e.py +++ b/tests/integration/backlog/test_verify_readiness_e2e.py @@ -36,6 +36,10 @@ def fetch_relationships(self, project_id: str) -> list[dict[str, Any]]: {"source_id": "101", "target_id": "102", "type": "relates_to"}, ] + def create_issue(self, project_id: str, payload: dict[str, Any]) -> dict[str, Any]: + _ = project_id, payload + return {"id": "103", "key": "A-103", "url": "https://example.test/workitems/103"} + def test_verify_readiness_returns_ready_exit_code(monkeypatch) -> None: runner = CliRunner() diff --git a/tests/unit/backlog/test_field_mappers.py b/tests/unit/backlog/test_field_mappers.py index cac2c0b4..814461fb 100644 --- a/tests/unit/backlog/test_field_mappers.py +++ b/tests/unit/backlog/test_field_mappers.py @@ -192,6 +192,17 @@ def test_extract_work_item_type_from_prefixed_label(self) -> None: fields = mapper.extract_fields(item_data) assert fields["work_item_type"] == "Story" + def test_extract_work_item_type_from_native_type_object(self) -> None: + """GitHub mapper resolves work item type from native issue type metadata.""" + mapper = GitHubFieldMapper() + item_data = { + "body": "test", + "labels": [], + "type": {"name": "Feature"}, + } + fields = mapper.extract_fields(item_data) + assert fields["work_item_type"] == "Feature" + class TestAdoFieldMapper: """Tests for AdoFieldMapper with default mappings.""" diff --git a/tests/unit/commands/test_backlog_commands.py b/tests/unit/commands/test_backlog_commands.py index 61c49246..c9dc574d 100644 
--- a/tests/unit/commands/test_backlog_commands.py +++ b/tests/unit/commands/test_backlog_commands.py @@ -6,8 +6,10 @@ from __future__ import annotations +from pathlib import Path from unittest.mock import MagicMock, patch +import yaml from rich.panel import Panel from typer.testing import CliRunner @@ -238,6 +240,134 @@ def test_map_fields_requires_token(self) -> None: assert result.exit_code != 0 assert "token required" in result.stdout.lower() or "error" in result.stdout.lower() + @patch("questionary.checkbox") + @patch("specfact_cli.utils.auth_tokens.get_token") + def test_map_fields_provider_picker_accepts_choice_objects( + self, + mock_get_token: MagicMock, + mock_checkbox: MagicMock, + tmp_path, + ) -> None: + """Provider picker should accept questionary Choice-like objects with `.value`.""" + + class _ChoiceLike: + def __init__(self, value: str) -> None: + self.value = value + + mock_checkbox.return_value.ask.return_value = [_ChoiceLike("github")] + mock_get_token.return_value = {"access_token": "gho_test", "token_type": "bearer"} + + import os + + cwd = Path.cwd() + try: + os.chdir(tmp_path) + result = runner.invoke( + app, + [ + "backlog", + "map-fields", + "--github-project-id", + "nold-ai/specfact-demo-repo", + "--github-project-v2-id", + "PVT_project_id", + "--github-type-field-id", + "PVT_type_field", + "--github-type-option", + "task=OPT_TASK", + ], + ) + finally: + os.chdir(cwd) + + assert result.exit_code == 0 + assert "No providers selected" not in result.stdout + + @patch("specfact_cli.utils.auth_tokens.get_token") + def test_map_fields_github_provider_persists_backlog_config(self, mock_get_token: MagicMock, tmp_path) -> None: + """Test GitHub provider mapping persistence into .specfact/backlog-config.yaml.""" + mock_get_token.return_value = {"access_token": "gho_test", "token_type": "bearer"} + import os + + cwd = Path.cwd() + try: + os.chdir(tmp_path) + result = runner.invoke( + app, + [ + "backlog", + "map-fields", + "--provider", + "github", 
+ "--github-project-id", + "nold-ai/specfact-demo-repo", + "--github-project-v2-id", + "PVT_project_id", + "--github-type-field-id", + "PVT_type_field", + "--github-type-option", + "task=OPT_TASK", + ], + ) + finally: + os.chdir(cwd) + + assert result.exit_code == 0 + cfg_file = tmp_path / ".specfact" / "backlog-config.yaml" + assert cfg_file.exists() + loaded = yaml.safe_load(cfg_file.read_text(encoding="utf-8")) + github_settings = loaded["backlog_config"]["providers"]["github"]["settings"] + mapping = github_settings["provider_fields"]["github_project_v2"] + assert mapping["project_id"] == "PVT_project_id" + assert mapping["type_field_id"] == "PVT_type_field" + assert mapping["type_option_ids"]["task"] == "OPT_TASK" + assert github_settings["field_mapping_file"] == ".specfact/templates/backlog/field_mappings/github_custom.yaml" + github_custom = tmp_path / ".specfact" / "templates" / "backlog" / "field_mappings" / "github_custom.yaml" + assert github_custom.exists() + github_custom_payload = yaml.safe_load(github_custom.read_text(encoding="utf-8")) + assert github_custom_payload["type_mapping"]["task"] == "task" + + def test_backlog_init_config_scaffolds_default_file(self, tmp_path) -> None: + """Test backlog init-config creates default backlog-config scaffold.""" + import os + + cwd = Path.cwd() + try: + os.chdir(tmp_path) + result = runner.invoke(app, ["backlog", "init-config"]) + finally: + os.chdir(cwd) + + assert result.exit_code == 0 + cfg_file = tmp_path / ".specfact" / "backlog-config.yaml" + assert cfg_file.exists() + loaded = yaml.safe_load(cfg_file.read_text(encoding="utf-8")) + assert "backlog_config" in loaded + assert "providers" in loaded["backlog_config"] + assert "github" in loaded["backlog_config"]["providers"] + assert "ado" in loaded["backlog_config"]["providers"] + + def test_backlog_init_config_does_not_overwrite_without_force(self, tmp_path) -> None: + """Test backlog init-config respects no-overwrite behavior by default.""" + import os + 
+ cfg_dir = tmp_path / ".specfact" + cfg_dir.mkdir(parents=True, exist_ok=True) + cfg_file = cfg_dir / "backlog-config.yaml" + cfg_file.write_text("backlog_config:\n providers:\n github:\n adapter: github\n", encoding="utf-8") + + cwd = Path.cwd() + try: + os.chdir(tmp_path) + result = runner.invoke(app, ["backlog", "init-config"]) + finally: + os.chdir(cwd) + + assert result.exit_code == 0 + content = cfg_file.read_text(encoding="utf-8") + assert "adapter: github" in content + assert "already exists" in result.stdout.lower() + class TestParseRefinedExportMarkdown: """Tests for _parse_refined_export_markdown (refine --import-from-tmp parser).""" diff --git a/tests/unit/specfact_cli/adapters/test_adapter_retry_policy_usage.py b/tests/unit/specfact_cli/adapters/test_adapter_retry_policy_usage.py new file mode 100644 index 00000000..7dd2853b --- /dev/null +++ b/tests/unit/specfact_cli/adapters/test_adapter_retry_policy_usage.py @@ -0,0 +1,96 @@ +"""Tests for shared retry policy usage across adapter write operations.""" + +from __future__ import annotations + +from specfact_cli.adapters.ado import AdoAdapter +from specfact_cli.adapters.github import GitHubAdapter + + +class _Resp: + def __init__(self, payload: dict) -> None: + self._payload = payload + + def raise_for_status(self) -> None: + return None + + def json(self) -> dict: + return self._payload + + +def test_github_add_issue_comment_uses_duplicate_safe_retry(monkeypatch) -> None: + adapter = GitHubAdapter(repo_owner="nold-ai", repo_name="specfact-cli", api_token="token", use_gh_cli=False) + + captured: dict[str, object] = {} + + def _capture_retry(_request_callable, **kwargs): + captured.update(kwargs) + return _Resp({}) + + monkeypatch.setattr(adapter, "_request_with_retry", _capture_retry) + + adapter._add_issue_comment("nold-ai", "specfact-cli", 42, "hello") + + assert captured.get("retry_on_ambiguous_transport") is False + + +def test_github_update_issue_status_uses_default_retry_mode(monkeypatch) -> None: 
+ adapter = GitHubAdapter(repo_owner="nold-ai", repo_name="specfact-cli", api_token="token", use_gh_cli=False) + + captured: dict[str, object] = {} + + def _capture_retry(_request_callable, **kwargs): + captured.update(kwargs) + return _Resp({"number": 42, "html_url": "https://example.test/42", "state": "open"}) + + monkeypatch.setattr(adapter, "_request_with_retry", _capture_retry) + monkeypatch.setattr(adapter, "_get_status_comment", lambda *_args, **_kwargs: "") + + proposal_data = { + "status": "in-progress", + "title": "Change title", + "source_tracking": {"source_id": 42}, + } + + result = adapter._update_issue_status(proposal_data, "nold-ai", "specfact-cli") + + assert result["issue_number"] == 42 + assert "retry_on_ambiguous_transport" not in captured + + +def test_ado_add_work_item_comment_uses_duplicate_safe_retry(monkeypatch) -> None: + adapter = AdoAdapter(org="nold-ai", project="specfact-cli", api_token="token") + + captured: dict[str, object] = {} + + def _capture_retry(_request_callable, **kwargs): + captured.update(kwargs) + return _Resp({"id": 7}) + + monkeypatch.setattr(adapter, "_request_with_retry", _capture_retry) + + result = adapter._add_work_item_comment("nold-ai", "specfact-cli", 101, "comment") + + assert result["comment_id"] == 7 + assert captured.get("retry_on_ambiguous_transport") is False + + +def test_ado_update_work_item_status_uses_default_retry_mode(monkeypatch) -> None: + adapter = AdoAdapter(org="nold-ai", project="specfact-cli", api_token="token") + + captured: dict[str, object] = {} + + def _capture_retry(_request_callable, **kwargs): + captured.update(kwargs) + return _Resp({"_links": {"html": {"href": "https://example.test/workitem/101"}}}) + + monkeypatch.setattr(adapter, "_request_with_retry", _capture_retry) + + proposal_data = { + "status": "in-progress", + "source_tracking": {"source_id": 101}, + } + + result = adapter._update_work_item_status(proposal_data, "nold-ai", "specfact-cli") + + assert result["work_item_id"] == 
101 + assert "retry_on_ambiguous_transport" not in captured diff --git a/tests/unit/specfact_cli/adapters/test_ado_parent_candidate_filtering.py b/tests/unit/specfact_cli/adapters/test_ado_parent_candidate_filtering.py new file mode 100644 index 00000000..bb557945 --- /dev/null +++ b/tests/unit/specfact_cli/adapters/test_ado_parent_candidate_filtering.py @@ -0,0 +1,76 @@ +"""Regression tests for ADO parent-candidate filtering behavior.""" + +from __future__ import annotations + +from specfact_cli.adapters.ado import AdoAdapter +from specfact_cli.models.backlog_item import BacklogItem + + +def _item(item_id: str, iteration: str | None) -> BacklogItem: + return BacklogItem( + id=item_id, + provider="ado", + url=f"https://example.test/{item_id}", + title=f"Item {item_id}", + state="open", + iteration=iteration, + ) + + +def test_resolve_sprint_filter_skips_implicit_current_iteration_when_disabled(monkeypatch) -> None: + adapter = AdoAdapter(org="nold-ai", project="specfact-cli", api_token="token") + + monkeypatch.setattr(adapter, "_get_current_iteration", lambda: "Project\\Sprint 42") + + items = [_item("1", None), _item("2", "Project\\Sprint 41")] + + resolved, filtered = adapter._resolve_sprint_filter(None, items, apply_current_when_missing=False) + + assert resolved is None + assert [item.id for item in filtered] == ["1", "2"] + + +def test_resolve_sprint_filter_uses_current_iteration_by_default(monkeypatch) -> None: + adapter = AdoAdapter(org="nold-ai", project="specfact-cli", api_token="token") + + monkeypatch.setattr(adapter, "_get_current_iteration", lambda: "Project\\Sprint 42") + + items = [_item("1", None), _item("2", "Project\\Sprint 42"), _item("3", "Project\\Sprint 41")] + + resolved, filtered = adapter._resolve_sprint_filter(None, items, apply_current_when_missing=True) + + assert resolved == "Project\\Sprint 42" + assert [item.id for item in filtered] == ["2"] + + +def 
test_fetch_backlog_items_wiql_omits_iteration_when_current_default_disabled(monkeypatch) -> None: + import specfact_cli.adapters.ado as ado_module + from specfact_cli.backlog.filters import BacklogFilters + + adapter = AdoAdapter(org="nold-ai", project="specfact-cli", api_token="token") + + captured_query: dict[str, str] = {} + + class _Resp: + status_code = 200 + ok = True + text = "" + + def raise_for_status(self) -> None: + return None + + def json(self) -> dict: + return {"workItems": []} + + def _fake_post(url: str, headers: dict, json: dict, timeout: int): + _ = url, headers, timeout + captured_query["query"] = json.get("query", "") + return _Resp() + + monkeypatch.setattr(adapter, "_get_current_iteration", lambda: r"Project\Sprint 42") + monkeypatch.setattr(ado_module.requests, "post", _fake_post) + + filters = BacklogFilters(use_current_iteration_default=False) + _ = adapter.fetch_backlog_items(filters) + + assert "System.IterationPath" not in captured_query.get("query", "") diff --git a/tests/unit/specfact_cli/adapters/test_backlog_retry.py b/tests/unit/specfact_cli/adapters/test_backlog_retry.py new file mode 100644 index 00000000..ebb7020c --- /dev/null +++ b/tests/unit/specfact_cli/adapters/test_backlog_retry.py @@ -0,0 +1,115 @@ +"""Unit tests for centralized backlog adapter retry behavior.""" + +from __future__ import annotations + +import requests + +from specfact_cli.adapters.backlog_base import BacklogAdapterMixin + + +class _DummyRetryAdapter(BacklogAdapterMixin): + def map_backlog_status_to_openspec(self, status: str) -> str: + return status + + def map_openspec_status_to_backlog(self, status: str) -> str | list[str]: + return status + + def create_issue(self, project_id: str, payload: dict[str, object]) -> dict[str, object]: + _ = project_id, payload + return {} + + def extract_change_proposal_data(self, item_data: dict[str, object]) -> dict[str, object]: + _ = item_data + return {} + + +class _Response: + def __init__(self, status_code: int, 
payload: dict | None = None) -> None: + self.status_code = status_code + self._payload = payload or {} + + def raise_for_status(self) -> None: + if self.status_code >= 400: + error = requests.HTTPError(f"HTTP {self.status_code}") + error.response = self + raise error + + def json(self) -> dict: + return self._payload + + +def test_request_with_retry_retries_transient_status_then_succeeds(monkeypatch) -> None: + adapter = _DummyRetryAdapter() + monkeypatch.setattr("specfact_cli.adapters.backlog_base.time.sleep", lambda _seconds: None) + + calls = {"count": 0} + + def _request() -> _Response: + calls["count"] += 1 + if calls["count"] < 3: + return _Response(503) + return _Response(200, {"ok": True}) + + response = adapter._request_with_retry(_request) + + assert response.status_code == 200 + assert calls["count"] == 3 + + +def test_request_with_retry_does_not_retry_non_transient_http_error(monkeypatch) -> None: + adapter = _DummyRetryAdapter() + monkeypatch.setattr("specfact_cli.adapters.backlog_base.time.sleep", lambda _seconds: None) + + calls = {"count": 0} + + def _request() -> _Response: + calls["count"] += 1 + return _Response(400) + + try: + adapter._request_with_retry(_request) + except requests.HTTPError as error: + assert error.response is not None + assert error.response.status_code == 400 + else: + raise AssertionError("Expected HTTPError") + + assert calls["count"] == 1 + + +def test_request_with_retry_retries_connection_error_then_succeeds(monkeypatch) -> None: + adapter = _DummyRetryAdapter() + monkeypatch.setattr("specfact_cli.adapters.backlog_base.time.sleep", lambda _seconds: None) + + calls = {"count": 0} + + def _request() -> _Response: + calls["count"] += 1 + if calls["count"] < 2: + raise requests.ConnectionError("network") + return _Response(200) + + response = adapter._request_with_retry(_request) + + assert response.status_code == 200 + assert calls["count"] == 2 + + +def 
test_request_with_retry_does_not_retry_transport_when_ambiguous_disabled(monkeypatch) -> None: + adapter = _DummyRetryAdapter() + monkeypatch.setattr("specfact_cli.adapters.backlog_base.time.sleep", lambda _seconds: None) + + calls = {"count": 0} + + def _request() -> _Response: + calls["count"] += 1 + raise requests.Timeout("timeout") + + try: + adapter._request_with_retry(_request, retry_on_ambiguous_transport=False) + except requests.Timeout: + pass + else: + raise AssertionError("Expected Timeout") + + assert calls["count"] == 1