From f8925798978b48b0f223bf9ff7d489ac97bf221b Mon Sep 17 00:00:00 2001 From: Zbigniew Sobiecki Date: Wed, 1 Apr 2026 19:12:55 +0200 Subject: [PATCH 01/52] docs(architecture): add comprehensive architecture documentation (#1069) Add a hub-and-spoke architecture documentation suite covering all backend subsystems (excluding tests and dashboard web frontend). The hub document provides a system overview with Mermaid diagrams and links to 10 deep-dive documents covering services, webhook pipeline, triggers, agents, engines, integrations, gadgets, config/credentials, database, and resilience. Also adds 36 unit tests validating doc structure and cross-references, and removes a stale biome suppression comment in project-harness-form.tsx. Co-authored-by: Claude Opus 4.6 (1M context) --- docs/ARCHITECTURE.md | 137 ++++++++++ docs/architecture/01-services.md | 172 ++++++++++++ docs/architecture/02-webhook-pipeline.md | 149 +++++++++++ docs/architecture/03-trigger-system.md | 180 +++++++++++++ docs/architecture/04-agent-system.md | 250 ++++++++++++++++++ docs/architecture/05-engine-backends.md | 154 +++++++++++ docs/architecture/06-integration-layer.md | 173 ++++++++++++ docs/architecture/07-gadgets.md | 119 +++++++++ docs/architecture/08-config-credentials.md | 153 +++++++++++ docs/architecture/09-database.md | 197 ++++++++++++++ docs/architecture/10-resilience.md | 141 ++++++++++ tests/unit/architecture-docs.test.ts | 163 ++++++++++++ .../projects/project-harness-form.tsx | 1 - 13 files changed, 1988 insertions(+), 1 deletion(-) create mode 100644 docs/ARCHITECTURE.md create mode 100644 docs/architecture/01-services.md create mode 100644 docs/architecture/02-webhook-pipeline.md create mode 100644 docs/architecture/03-trigger-system.md create mode 100644 docs/architecture/04-agent-system.md create mode 100644 docs/architecture/05-engine-backends.md create mode 100644 docs/architecture/06-integration-layer.md create mode 100644 docs/architecture/07-gadgets.md create mode 
100644 docs/architecture/08-config-credentials.md create mode 100644 docs/architecture/09-database.md create mode 100644 docs/architecture/10-resilience.md create mode 100644 tests/unit/architecture-docs.test.ts diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md new file mode 100644 index 00000000..6562a2ef --- /dev/null +++ b/docs/ARCHITECTURE.md @@ -0,0 +1,137 @@ +# CASCADE Architecture + +CASCADE is a PM-to-Code automation platform that connects project management tools (Trello, JIRA), source control (GitHub), and monitoring (Sentry) to AI-powered agents that autonomously implement features, review PRs, debug failures, and manage backlogs. Webhooks from external providers flow through a router, get queued in Redis, and are processed by ephemeral worker containers that run agents against cloned repositories. + +> **Relationship to CLAUDE.md**: `CLAUDE.md` is the operational reference (commands, env vars, how-to). This document and its deep-dives cover the *system design* — how components fit together and why. + +## System Overview + +```mermaid +graph TB + subgraph External["External Providers"] + Trello + JIRA + GitHub + Sentry + end + + subgraph CASCADE["CASCADE Platform"] + Router["Router :3000
Webhook receiver"] + Redis[(Redis / BullMQ)] + Worker["Worker containers
One job per container"] + Dashboard["Dashboard :3001
API + tRPC"] + DB[(PostgreSQL)] + end + + subgraph Clients + WebUI["Dashboard UI"] + CLI["cascade CLI"] + end + + Trello -->|webhook| Router + JIRA -->|webhook| Router + GitHub -->|webhook| Router + Sentry -->|webhook| Router + + Router -->|enqueue job| Redis + Redis -->|dequeue job| Worker + + Worker -->|PRs, comments| GitHub + Worker -->|status updates| Trello + Worker -->|status updates| JIRA + + Router <--> DB + Worker <--> DB + Dashboard <--> DB + Dashboard <--> Redis + + WebUI <--> Dashboard + CLI <--> Dashboard +``` + +See also: [`docs/architecture.d2`](architecture.d2) for the D2 source diagram. + +## Service Topology + +| Service | Entry Point | Default Port | Responsibility | +|---------|-------------|-------------|----------------| +| **Router** | `src/router/index.ts` | 3000 | Receive webhooks, verify signatures, run trigger dispatch, enqueue jobs to Redis, manage worker containers | +| **Worker** | `src/worker-entry.ts` | N/A (ephemeral) | Process one job per container — run trigger handlers, execute agents, exit on completion | +| **Dashboard** | `src/dashboard.ts` | 3001 | tRPC API for web UI and CLI, session auth, serve frontend static files in self-hosted mode | + +## End-to-End Request Flow + +The canonical path from webhook to pull request: + +```mermaid +sequenceDiagram + participant P as Provider
(Trello/GitHub/JIRA/Sentry) + participant R as Router + participant Q as Redis/BullMQ + participant W as Worker + participant A as Agent Engine + + P->>R: POST /provider/webhook + R->>R: Parse, verify signature, dedup + R->>R: Lookup project, dispatch triggers + R->>R: Check concurrency, post ack comment + R->>Q: Enqueue job + Q->>W: Spawn container with job env vars + W->>W: Bootstrap integrations, dispatch by job type + W->>W: Match trigger, resolve agent definition + W->>A: Execute agent (clone repo, run engine) + A->>A: LLM loop: read, edit, test, commit + A-->>P: Create PR / post comments / update status + W->>W: Finalize run record, cleanup, exit +``` + +## Architectural Patterns + +**Registry pattern** — Integrations, triggers, engines, PM providers, and capabilities all use registries (singleton maps populated at bootstrap). Infrastructure code looks up by key with no provider-specific branching. + +**Capability-driven tool resolution** — Agent YAML definitions declare required capabilities (`fs:read`, `pm:write`, `scm:pr`). At runtime, capabilities are resolved against available integrations to determine which gadgets (tools) the agent receives. + +**Two-tier credential resolution** — In the router and dashboard, credentials are read from the `project_credentials` database table. In workers, the router pre-loads credentials as environment variables to avoid giving workers direct DB access to secrets. + +**Dual-persona GitHub model** — Each project uses two GitHub bot accounts (implementer and reviewer) to prevent feedback loops. Agent type determines which persona token is used. + +**YAML-based agent definitions** — Agents are defined declaratively in YAML files specifying identity, capabilities, triggers, prompts, and lifecycle hooks. Definitions resolve via three tiers: in-memory cache, database, then YAML files on disk. 
+ +**AsyncLocalStorage credential scoping** — Provider clients (GitHub, Trello, JIRA) use Node.js `AsyncLocalStorage` to scope credentials per-request, preventing cross-request credential leakage. + +## Directory Map + +| Directory | Purpose | +|-----------|---------| +| `src/router/` | Webhook receiver, BullMQ producer, worker container management | +| `src/webhook/` | Shared webhook handler factory, parsers, signature verification, logging | +| `src/triggers/` | Event-to-agent routing: TriggerRegistry, TriggerHandler implementations | +| `src/agents/` | Agent definitions (YAML), profiles, capabilities, prompt templates | +| `src/backends/` | LLM execution engines: Claude Code, LLMist, Codex, OpenCode | +| `src/gadgets/` | Tool implementations agents use (file ops, PM, SCM, alerting, shell) | +| `src/integrations/` | Unified integration interfaces, registry, bootstrap | +| `src/pm/` | PM abstraction layer: provider interface, Trello/JIRA adapters, lifecycle | +| `src/github/` | GitHub API client, dual-persona model, PR operations | +| `src/trello/` | Trello API client | +| `src/jira/` | JIRA API client (jira.js wrapper) | +| `src/sentry/` | Sentry API client, alerting integration | +| `src/config/` | Configuration provider, caching, credential resolution, integration roles | +| `src/db/` | Drizzle ORM schema, repositories, migrations | +| `src/api/` | tRPC routers for dashboard API | +| `src/cli/` | Two CLIs: `cascade` (dashboard) and `cascade-tools` (agent tools) | +| `src/utils/` | Logging, repo cloning, lifecycle/watchdog, env scrubbing | +| `src/types/` | Shared TypeScript types | +| `src/queue/` | BullMQ queue helpers | + +## Deep-Dive Documents + +1. [Services and Deployment](./architecture/01-services.md) — Three-service architecture, startup sequences, container model +2. [Webhook Pipeline](./architecture/02-webhook-pipeline.md) — Handler factory, platform adapters, processing pipeline +3. 
[Trigger System](./architecture/03-trigger-system.md) — TriggerRegistry, handlers, config resolution, context pipeline +4. [Agent System](./architecture/04-agent-system.md) — YAML definitions, profiles, capabilities, prompts, hooks +5. [Engine Backends](./architecture/05-engine-backends.md) — AgentEngine interface, archetypes, execution adapter +6. [Integration Layer](./architecture/06-integration-layer.md) — IntegrationModule, registry, categories, provider implementations +7. [Gadgets](./architecture/07-gadgets.md) — Capability-to-gadget mapping, built-in tools, cascade-tools CLI +8. [Configuration and Credentials](./architecture/08-config-credentials.md) — Config provider, credential resolution, encryption +9. [Database](./architecture/09-database.md) — Schema, ER diagram, repositories, migrations +10. [Resilience](./architecture/10-resilience.md) — Watchdog, concurrency controls, rate limiting, retry, loop prevention diff --git a/docs/architecture/01-services.md b/docs/architecture/01-services.md new file mode 100644 index 00000000..f2769834 --- /dev/null +++ b/docs/architecture/01-services.md @@ -0,0 +1,172 @@ +# Services and Deployment + +CASCADE runs as three independent services. There is no monolithic server mode — each service has a distinct entry point, lifecycle, and scaling model. + +```mermaid +graph LR + subgraph Router["Router Container"] + R_Hono["Hono :3000"] + R_BullMQ["BullMQ Producer"] + R_WM["Worker Manager"] + end + + subgraph Workers["Worker Containers (ephemeral)"] + W1["Worker 1"] + W2["Worker 2"] + WN["Worker N"] + end + + subgraph Dashboard["Dashboard Container"] + D_Hono["Hono :3001"] + D_tRPC["tRPC Router"] + end + + Redis[(Redis)] + DB[(PostgreSQL)] + + R_Hono --> R_BullMQ --> Redis + R_WM --> Workers + Redis --> R_WM + + D_Hono --> D_tRPC + Dashboard <--> DB + Router <--> DB + Workers <--> DB +``` + +## Router + +**Entry point**: `src/router/index.ts` +**Default port**: 3000 + +The router is the webhook ingestion point. 
It receives HTTP POST requests from external providers, processes them through a multi-step pipeline, and enqueues jobs to Redis for worker containers. + +### Webhook endpoints + +| Route | Provider | Notes | +|-------|----------|-------| +| `POST /trello/webhook` | Trello | HEAD/GET returns 200 for Trello's verification | +| `POST /github/webhook` | GitHub | Injects `X-GitHub-Event` header into payload | +| `POST /jira/webhook` | JIRA | HEAD/GET returns 200 for JIRA verification | +| `POST /sentry/webhook/:projectId` | Sentry | Project ID in URL for unambiguous routing | +| `GET /health` | Internal | Queue stats, active worker count | + +### Startup sequence + +Module-load phase (runs at import time, before `startRouter()`): +1. `registerBuiltInEngines()` — register engine settings schemas (required before any `loadConfig()`) +2. `createTriggerRegistry()` + `registerBuiltInTriggers()` — populate trigger handlers + +`startRouter()` async phase: +3. `seedAgentDefinitions()` — sync built-in YAML definitions to database +4. `initAgentMessages()` — load ack message templates +5. `initPrompts()` — load prompt templates +6. `startCancelListener()` — listen for run cancellation requests +7. `startWorkerProcessor()` — begin polling BullMQ for jobs and spawning containers +8. 
`serve()` — start Hono HTTP server + +### Key modules + +| File | Purpose | +|------|---------| +| `webhook-processor.ts` | Generic 12-step pipeline (see [02-webhook-pipeline](./02-webhook-pipeline.md)) | +| `platform-adapter.ts` | `RouterPlatformAdapter` interface | +| `adapters/` | Per-provider adapter implementations | +| `worker-manager.ts` | Spawns/monitors Docker worker containers | +| `queue.ts` | BullMQ `addJob()`, queue stats | +| `action-dedup.ts` | In-memory deduplication of webhook deliveries | +| `work-item-lock.ts` | Prevents concurrent agents on the same work item | +| `agent-type-lock.ts` | Agent-type concurrency limits | +| `cancel-listener.ts` | Listens for run cancellation via BullMQ events | +| `webhookVerification.ts` | HMAC signature verification per provider | + +## Worker + +**Entry point**: `src/worker-entry.ts` +**Port**: None (ephemeral container, no HTTP server) + +Workers are stateless, one-job-per-container processes spawned by the router's worker manager. Each worker reads its job from environment variables, processes it, and exits. + +### Environment variables + +The router passes job data to workers via Docker container env vars: + +| Variable | Purpose | +|----------|---------| +| `JOB_ID` | Unique job identifier | +| `JOB_TYPE` | `trello`, `github`, `jira`, `sentry`, `manual-run`, `retry-run`, `debug-analysis` | +| `JOB_DATA` | JSON-encoded job payload | +| `CASCADE_CREDENTIAL_KEYS` | Comma-separated list of credential env var names | +| Individual credential vars | Pre-loaded project credentials (e.g., `GITHUB_TOKEN_IMPLEMENTER`) | + +### Job types + +```typescript +type JobData = + | TrelloJobData // Trello webhook payload + | GitHubJobData // GitHub webhook payload + | JiraJobData // JIRA webhook payload + | SentryJobData // Sentry webhook payload + | ManualRunJobData // Dashboard-initiated run + | RetryRunJobData // Retry a failed run + | DebugAnalysisJobData; // Post-mortem debug analysis +``` + +### Startup sequence + +1. 
`loadEnvConfigSafe()` — load `.cascade/env` if present +2. `getDb()` — eagerly initialize DB connection (caches pool before env scrub) +3. `registerBuiltInEngines()` — register engine settings schemas (before `loadConfig()`) +4. `loadConfig()` — cache project config from database +5. `seedAgentDefinitions()` — sync built-in YAML definitions to database +6. `initAgentMessages()` — load ack message templates +7. `initPrompts()` — load prompt templates +8. `scrubSensitiveEnv()` — remove `DATABASE_URL` and other secrets from `process.env` +9. `createTriggerRegistry()` + `registerBuiltInTriggers()` — populate trigger handlers +10. `dispatchJob()` — route to the appropriate handler based on `JOB_TYPE` + +The security scrub in step 8 prevents agent engines (which execute arbitrary LLM-generated commands) from accessing database credentials. Note that trigger registration (step 9) happens after the scrub — it only needs the in-memory config, not the database. + +### Dispatch flow + +`dispatchJob()` switches on the job type: +- **Webhook jobs** (`trello`, `github`, `jira`, `sentry`) — call the provider-specific webhook processor, which re-runs trigger dispatch and executes the matched agent +- **Dashboard jobs** (`manual-run`, `retry-run`, `debug-analysis`) — call `processDashboardJob()`, which loads project config and invokes the appropriate runner + +## Dashboard + +**Entry point**: `src/dashboard.ts` +**Default port**: 3001 + +The dashboard serves the tRPC API consumed by both the web frontend and the `cascade` CLI. In self-hosted mode, it also serves the built frontend as static files. 
+ +### Routes + +| Route | Purpose | +|-------|---------| +| `POST /api/auth/login` | Email/password authentication | +| `POST /api/auth/logout` | Session invalidation | +| `/trpc/*` | tRPC API endpoints | +| `GET /health` | Service health check | +| `/*` (static) | Frontend files from `dist/web/` (self-hosted mode only) | + +### Startup sequence + +Module-load phase (runs at import time, before `startDashboard()`): +1. `registerBuiltInEngines()` — register engine settings schemas +2. CORS middleware, logging middleware registered on Hono app +3. Auth routes mounted (`/api/auth/login`, `/api/auth/logout`) +4. tRPC router mounted with session-based context resolution +5. Static file serving (if `dist/web/` exists) + +`startDashboard()` async phase: +6. `initPrompts()` — load prompt templates +7. `serve()` — start Hono HTTP server + +### tRPC context + +Every tRPC request builds a context containing: +- `user` — resolved from session cookie via `resolveUserFromSession()` +- `effectiveOrgId` — computed from user's org membership or `x-org-context` header + +Procedure types enforce auth levels: `publicProcedure`, `protectedProcedure`, `adminProcedure`, `superAdminProcedure`. diff --git a/docs/architecture/02-webhook-pipeline.md b/docs/architecture/02-webhook-pipeline.md new file mode 100644 index 00000000..dfd929ed --- /dev/null +++ b/docs/architecture/02-webhook-pipeline.md @@ -0,0 +1,149 @@ +# Webhook Pipeline + +Webhooks from external providers (Trello, GitHub, JIRA, Sentry) are processed through a two-layer system: a **webhook handler factory** that handles HTTP concerns, and a **router platform adapter** that implements the business logic pipeline. 
## Webhook Handler Factory + +`src/webhook/webhookHandlers.ts` — `createWebhookHandler()` + +The factory creates Hono route handlers with a standard lifecycle: + +``` +HTTP POST → Parse payload → Verify signature → Process webhook → Log result → Return 200/4xx +``` + +Each webhook endpoint provides a `WebhookHandlerConfig`: + +```typescript +interface WebhookHandlerConfig { + source: string; // 'trello' | 'github' | 'jira' | 'sentry' + parsePayload: (c: Context) => ParseResult; + verifySignature?: (ctx, rawBody, projectId?) => VerificationResult | null; + processWebhook: (payload, eventType?, headers?) => Promise<unknown>; +} +``` + +The factory handles: +- Payload parsing with per-provider parsers (`src/webhook/webhookParsers.ts`) +- Optional signature verification (`src/webhook/signatureVerification.ts`) +- Fire-and-forget acknowledgment reactions +- Webhook logging to `webhook_logs` table (`src/webhook/webhookLogging.ts`) +- Error handling (parse failures → 400, signature failures → 401) + +### Platform Parsers + +| Parser | Source | Event type extraction | +|--------|--------|----------------------| +| `parseGitHubPayload()` | JSON or form-encoded body | `X-GitHub-Event` header | +| `parseTrelloPayload()` | JSON body | `action.type` field | +| `parseJiraPayload()` | JSON body | `webhookEvent` field | +| `parseSentryPayload()` | JSON body | `Sentry-Hook-Resource` header | + +## Platform Adapters + +`src/router/platform-adapter.ts` — `RouterPlatformAdapter` interface + +Each provider implements this interface to plug into the generic `processRouterWebhook()` pipeline: + +```typescript +interface RouterPlatformAdapter { + readonly type: string; + parseWebhook(payload: unknown): Promise<ParsedWebhookEvent>; + isProcessableEvent(event: ParsedWebhookEvent): boolean; + isSelfAuthored(event: ParsedWebhookEvent, payload: unknown): Promise<boolean>; + sendReaction(event: ParsedWebhookEvent, payload: unknown): void; + resolveProject(event: ParsedWebhookEvent): Promise<ProjectConfig | null>; + dispatchWithCredentials(event, 
payload, project, triggerRegistry): Promise<TriggerResult | null>; + postAck(event, payload, project, agentType, triggerResult): Promise<unknown>; + buildJob(event, payload, project, triggerResult, ackResult): CascadeJob; + firePreActions?(job, payload): void; +} +``` + +### Normalized event + +All platforms normalize their webhook payload into a `ParsedWebhookEvent`: + +```typescript +interface ParsedWebhookEvent { + projectIdentifier: string; // Board ID, repo name, JIRA project key + eventType: string; // Human-readable event descriptor + workItemId?: string; // Card ID, PR number, issue key + isCommentEvent: boolean; // Whether this needs ack reaction + actionId?: string; // Platform-specific ID for dedup +} +``` + +### Provider adapters + +| Adapter | File | Project lookup key | +|---------|------|--------------------| +| `TrelloRouterAdapter` | `src/router/adapters/trello.ts` | `boardId` | +| `GitHubRouterAdapter` | `src/router/adapters/github.ts` | `repoFullName` | +| `JiraRouterAdapter` | `src/router/adapters/jira.ts` | JIRA project key | +| `SentryRouterAdapter` | `src/router/adapters/sentry.ts` | CASCADE `projectId` (from URL) | + +## The 12-Step Pipeline + +`src/router/webhook-processor.ts` — `processRouterWebhook()` + +```mermaid +flowchart TD + A[1. Parse payload] --> B{2. Duplicate?} + B -->|Yes| SKIP1[Skip: duplicate action] + B -->|No| C{3. Processable event?} + C -->|No| SKIP2[Skip: event type not processable] + C -->|Yes| D{4. Self-authored?} + D -->|Yes| SKIP3[Skip: loop prevention] + D -->|No| E[5. Fire ack reaction] + E --> F{6. Resolve project config} + F -->|Not found| SKIP4[Skip: no project config] + F -->|Found| G[7. Dispatch triggers with credentials] + G -->|No match| SKIP5[Skip: no trigger matched] + G -->|Matched| H{8. Work-item / agent-type lock} + H -->|Locked| SKIP6[Skip: concurrency limit] + H -->|Free| I[9. Post ack comment] + I --> J[10. Build job] + J --> K[11. Fire pre-actions] + K --> L[12. Enqueue to Redis] +``` + +### Step details + +1. 
**Parse** — Adapter normalizes raw payload into `ParsedWebhookEvent` +2. **Dedup** — Check in-memory set of recently processed `actionId`s (`action-dedup.ts`) +3. **Filter** — Adapter's `isProcessableEvent()` checks event type relevance +4. **Self-check** — Adapter's `isSelfAuthored()` detects bot's own actions (loop prevention) +5. **Reaction** — Fire-and-forget emoji reaction on the source event +6. **Resolve config** — Look up project by platform identifier (board ID, repo, etc.) +7. **Dispatch triggers** — Within credential scope, call `TriggerRegistry.dispatch()` to find matching agent +8. **Concurrency** — Check work-item lock (`work-item-lock.ts`) and agent-type concurrency (`agent-type-lock.ts`) +9. **Ack comment** — Post an acknowledgment comment to the work item or PR +10. **Build job** — Package trigger result + payload + ack info into a `CascadeJob` +11. **Pre-actions** — Optional fire-and-forget actions (e.g., GitHub eyes reaction) +12. **Enqueue** — Add job to BullMQ Redis queue; mark work item and agent type as enqueued + +### Concurrency controls + +| Mechanism | File | Purpose | +|-----------|------|---------| +| Action dedup | `action-dedup.ts` | Prevent processing same webhook delivery twice | +| Work-item lock | `work-item-lock.ts` | Prevent concurrent agents on the same card/issue | +| Agent-type lock | `agent-type-lock.ts` | Configurable `max_concurrency` per agent type per project | + +All locks are in-memory with TTL expiry. They are conservative (enqueue-time only) — the worker performs its own verification before executing. 
+ +## Signature Verification + +`src/router/webhookVerification.ts` + +Each provider's verification function checks for a stored `webhook_secret` credential and validates the signature header: + +| Provider | Header | Algorithm | +|----------|--------|-----------| +| GitHub | `X-Hub-Signature-256` | HMAC-SHA256 | +| Trello | Custom verification | Trello-specific | +| JIRA | `X-Hub-Signature` | HMAC-SHA256 | +| Sentry | `Sentry-Hook-Signature` | HMAC-SHA256 | + +If no webhook secret is configured for a project, verification is skipped (returns `null`). diff --git a/docs/architecture/03-trigger-system.md b/docs/architecture/03-trigger-system.md new file mode 100644 index 00000000..1c44b24c --- /dev/null +++ b/docs/architecture/03-trigger-system.md @@ -0,0 +1,180 @@ +# Trigger System + +The trigger system routes webhook events to the appropriate agent. When a webhook arrives, the router builds a `TriggerContext` and calls `TriggerRegistry.dispatch()` to find the first matching handler. The matched handler returns a `TriggerResult` specifying which agent to run and with what input. + +## TriggerRegistry + +`src/triggers/registry.ts` + +A simple ordered list of handlers with first-match-wins dispatch: + +```typescript +class TriggerRegistry { + register(handler: TriggerHandler): void; + dispatch(ctx: TriggerContext): Promise<TriggerResult | null>; + getHandlers(): TriggerHandler[]; +} +``` + +`dispatch()` iterates handlers in registration order. For each handler: +1. Call `matches(ctx)` — if `false`, skip +2. Call `handle(ctx)` — if it returns a `TriggerResult`, return it +3. 
If `handle()` returns `null`, continue to next handler + +## TriggerHandler + +`src/triggers/types.ts` + +```typescript +interface TriggerHandler { + name: string; + description: string; + matches(ctx: TriggerContext): boolean; + handle(ctx: TriggerContext): Promise<TriggerResult | null>; +} +``` + +### TriggerContext + +```typescript +interface TriggerContext { + project: ProjectConfig; + source: TriggerSource; // 'trello' | 'github' | 'jira' | 'sentry' + payload: unknown; // Raw webhook payload + personaIdentities?: PersonaIdentities; // GitHub bot identities +} +``` + +### TriggerResult + +```typescript +interface TriggerResult { + agentType: string | null; // Which agent to run + agentInput: AgentInput; // Input data for the agent + workItemId?: string; + workItemUrl?: string; + workItemTitle?: string; + prNumber?: number; + prUrl?: string; + prTitle?: string; + waitForChecks?: boolean; // Poll CI before starting + onBlocked?: () => void; // Cleanup if job can't be enqueued +} +``` + +## Built-in Triggers + +Registration happens in `src/triggers/builtins.ts`, which delegates to per-platform `register.ts` files: + +```typescript +function registerBuiltInTriggers(registry: TriggerRegistry): void { + registerTrelloTriggers(registry); + registerJiraTriggers(registry); + registerGitHubTriggers(registry); + registerSentryTriggers(registry); +} +``` + +### Trello triggers (`src/triggers/trello/`) + +| Handler | Event | Agent | +|---------|-------|-------| +| `TrelloCommentMentionTrigger` | Bot mentioned in comment | Varies by context | +| `TrelloStatusChangedSplittingTrigger` | Card → Splitting list | `splitting` | +| `TrelloStatusChangedPlanningTrigger` | Card → Planning list | `planning` | +| `TrelloStatusChangedTodoTrigger` | Card → Todo list | `implementation` | +| `TrelloStatusChangedBacklogTrigger` | Card → Backlog list | `backlog-manager` | +| `TrelloStatusChangedMergedTrigger` | Card → Merged list | `backlog-manager` | +| `ReadyToProcessLabelTrigger` | "cascade-ready" label added | 
`splitting` | + +### JIRA triggers (`src/triggers/jira/`) + +| Handler | Event | Agent | +|---------|-------|-------| +| `JiraCommentMentionTrigger` | Bot mentioned in comment | Varies | +| `JiraStatusChangedTrigger` | Issue status transition | Per-status mapping | +| `JiraLabelAddedTrigger` | "cascade-ready" label added | `splitting` | + +### GitHub triggers (`src/triggers/github/`) + +| Handler | Event | Agent | +|---------|-------|-------| +| `CheckSuiteSuccessTrigger` | CI passed | `review` (with `authorMode` param) | +| `CheckSuiteFailureTrigger` | CI failed | `respond-to-ci` | +| `PrReviewSubmittedTrigger` | Review with changes_requested | `respond-to-review` | +| `ReviewRequestedTrigger` | Bot requested as reviewer | `review` | +| `PrOpenedTrigger` | PR opened | `review` | +| `PrCommentMentionTrigger` | Bot @mentioned in PR comment | `respond-to-pr-comment` | +| `PrMergedTrigger` | PR merged | PM status update (no agent) | +| `PrReadyToMergeTrigger` | PR approved + checks pass | PM status update (no agent) | +| `PrConflictDetectedTrigger` | Merge conflict on PR | `resolve-conflicts` | + +### Sentry triggers (`src/triggers/sentry/`) + +| Handler | Event | Agent | +|---------|-------|-------| +| `AlertingIssueTrigger` | Sentry issue alert | `alerting` | +| `AlertingMetricTrigger` | Sentry metric alert | `alerting` | + +## Trigger Configuration + +### Event format + +Triggers use category-prefixed events: `{category}:{event-name}` +- `pm:status-changed`, `pm:label-added` +- `scm:check-suite-success`, `scm:pr-review-submitted`, `scm:review-requested` +- `alerting:issue-created`, `alerting:metric-alert` + +### Config resolution + +`src/triggers/config-resolver.ts` + +Each trigger handler calls `isTriggerEnabled()` to check if it should fire. Resolution follows a three-tier cascade: + +1. **Database overrides** — `agent_trigger_configs` table entries per project/agent/event +2. 
**Definition defaults** — `defaultEnabled` and default parameters from YAML definitions +3. **Legacy fallback** — `project_integrations.triggers` JSONB (migrated automatically) + +### Context pipeline + +Each trigger in a YAML agent definition can declare a `contextPipeline` — an ordered list of context-fetching steps that run before the agent starts: + +| Step | Purpose | +|------|---------| +| `directoryListing` | List repository file structure | +| `contextFiles` | Read key project files (README, etc.) | +| `squint` | Query Squint semantic index | +| `workItem` | Fetch work item details from PM tool | +| `prepopulateTodos` | Pre-populate todo list from work item checklists | +| `prContext` | Fetch PR details, diff, reviews | +| `prConversation` | Fetch PR comments and review threads | +| `pipelineSnapshot` | Fetch CI pipeline status | +| `alertingIssue` | Fetch Sentry issue and event details | + +## Shared Agent Execution + +`src/triggers/shared/agent-execution.ts` + +After a trigger matches, the shared execution layer handles the agent lifecycle: + +```mermaid +flowchart TD + A[Trigger matched] --> B[PM lifecycle: prepareForAgent] + B --> C[Check budget] + C -->|Over budget| D[Post budget warning, skip] + C -->|Within budget| E[Resolve agent definition] + E --> F[Set credential scope] + F --> G[Run agent via engine] + G -->|Success| H[PM lifecycle: handleSuccess] + G -->|Failure| I[PM lifecycle: handleFailure] + H --> J[Trigger debug analysis if configured] + I --> J +``` + +This includes: +- PM lifecycle management (move card to "In Progress", post labels) +- Budget checking (`workItemBudgetUsd`) +- Credential scoping via `withCredentials()` +- Agent execution via `runAgent()` (see [05-engine-backends](./05-engine-backends.md)) +- Post-run lifecycle (move card to "In Review", link PR, sync checklists) +- Debug analysis triggering on failure diff --git a/docs/architecture/04-agent-system.md b/docs/architecture/04-agent-system.md new file mode 100644 index 
00000000..eb583a7a --- /dev/null +++ b/docs/architecture/04-agent-system.md @@ -0,0 +1,250 @@ +# Agent System + +Agents are the core automation units in CASCADE. Each agent is defined declaratively in YAML, specifying its identity, capabilities, triggers, prompts, and lifecycle hooks. At runtime, definitions are compiled into profiles that determine which tools the agent receives and how it interacts with the PM/SCM systems. + +## Agent Definitions + +`src/agents/definitions/` + +### YAML structure + +Each built-in agent is a YAML file in `src/agents/definitions/`. Custom agents are stored in the `agent_definitions` database table. The schema is defined in `src/agents/definitions/schema.ts`. + +```yaml +identity: + emoji: "..." + label: "Implementation" + roleHint: "Writes code, runs tests, and prepares a pull request" + initialMessage: "**Implementing changes** — ..." + +integrations: + required: [pm, scm] # Fail if not configured + optional: [alerting] # Use if available + +capabilities: + required: + - fs:read + - fs:write + - shell:exec + - session:ctrl + - pm:read + - pm:write + - scm:pr + optional: + - pm:checklist + +triggers: + - event: pm:status-changed + label: "Status Changed to Todo" + defaultEnabled: false + parameters: + - name: targetStatus + type: select + options: [todo] + defaultValue: todo + contextPipeline: [directoryListing, contextFiles, squint, workItem, prepopulateTodos] + +prompts: + taskPrompt: | + Analyze and process the work item with ID: <%= it.workItemId %>. + +hooks: + trailing: + scm: + gitStatus: true + prStatus: true + builtin: + diagnostics: true + todoProgress: true + reminder: true + finish: + scm: + requiresPR: true + lifecycle: + moveOnPrepare: inProgress + moveOnSuccess: inReview + linkPR: true + syncChecklist: true + +hint: >- + Complete the current todo in as few iterations as possible. 
+``` + +### Key schema fields + +| Field | Purpose | +|-------|---------| +| `identity` | Agent display info (emoji, label, role hint, initial message) | +| `integrations` | Explicit integration requirements (required/optional categories) | +| `capabilities` | Required and optional capabilities that determine tool access | +| `triggers` | Events that activate this agent, with parameters and context pipelines | +| `prompts.taskPrompt` | Eta template for the agent's task prompt | +| `hooks.trailing` | Info appended to each LLM turn (git status, PR status, diagnostics) | +| `hooks.finish` | Completion requirements (must have PR, must have review, etc.) | +| `hooks.lifecycle` | PM card movement on prepare/success, PR linking, checklist sync | +| `hint` | Persistent guidance injected into the LLM context | +| `strategies` | Engine-specific strategy overrides | +| `gadgetOptions` | Special gadget builder flags (e.g., `includeReviewComments`) | + +### Three-tier definition resolution + +`src/agents/definitions/loader.ts` + +``` +1. In-memory cache (fastest, populated on first load) + ↓ miss +2. Database lookup (agent_definitions table — custom agents) + ↓ miss +3. 
YAML file on disk (src/agents/definitions/*.yaml — built-in agents) +``` + +Key functions: +- `resolveAgentDefinition(agentType)` — single agent, three-tier +- `resolveAllAgentDefinitions()` — merge DB + YAML +- `resolveKnownAgentTypes()` — list all known types + +## Built-in Agents + +| Agent | Capabilities | Persona | Key Triggers | +|-------|-------------|---------|--------------| +| `implementation` | fs, shell, session, pm, scm:pr | Implementer | `pm:status-changed` (todo) | +| `splitting` | fs, session, pm | Implementer | `pm:status-changed`, `pm:label-added` | +| `planning` | fs, session, pm | Implementer | `pm:status-changed` (planning) | +| `review` | fs, shell, scm:read, scm:review | Reviewer | `scm:check-suite-success`, `scm:review-requested` | +| `respond-to-review` | fs, shell, session, pm, scm | Implementer | `scm:pr-review-submitted` | +| `respond-to-ci` | fs, shell, session, scm | Implementer | `scm:check-suite-failure` | +| `respond-to-pr-comment` | fs, shell, session, scm | Implementer | `scm:pr-comment-mention` | +| `respond-to-planning-comment` | fs, session, pm | Implementer | `pm:comment-mention` | +| `backlog-manager` | fs, session, pm, scm:read | Implementer | `pm:status-changed` (backlog, merged) | +| `resolve-conflicts` | fs, shell, session, scm | Implementer | `scm:pr-conflict-detected` | +| `alerting` | fs, shell, session, alerting, scm | Implementer | `alerting:issue-created`, `alerting:metric-alert` | +| `debug` | fs, session, pm | Implementer | `internal:debug-analysis` | + +## Capabilities + +`src/agents/capabilities/` + +Capabilities are the bridge between agent definitions and concrete tools. The system maps capabilities to gadgets (for SDK engines) and SDK tools (for native-tool engines). 
+ +### Registry + +`src/agents/capabilities/registry.ts` + +```typescript +const CAPABILITIES = [ + // Built-in (always available) + 'fs:read', 'fs:write', 'shell:exec', 'session:ctrl', + // PM integration + 'pm:read', 'pm:write', 'pm:checklist', + // SCM integration + 'scm:read', 'scm:ci-logs', 'scm:comment', 'scm:review', 'scm:pr', + // Alerting integration + 'alerting:read', +] as const; +``` + +Each capability maps to a `CapabilityDefinition`: + +```typescript +interface CapabilityDefinition { + integration: IntegrationCategory | null; // null = built-in + description: string; + gadgetNames: string[]; // LLMist gadget classes + sdkToolNames: string[]; // Claude Code SDK tool names + cliToolNames: string[]; // cascade-tools CLI commands +} +``` + +### Resolution flow + +`src/agents/capabilities/resolver.ts` + +```mermaid +flowchart TD + A["Agent definition
(capabilities.required + optional)"] --> B[Create integration checker] + B --> C["Check hasPmIntegration(),
hasScmIntegration(),
hasAlertingIntegration()"] + C --> D[resolveEffectiveCapabilities] + D --> E["Built-in caps: always included"] + D --> F["Integration caps: only if provider configured"] + E --> G[buildGadgetsFromCapabilities] + F --> G + G --> H["Instantiate gadget classes
via GADGET_CONSTRUCTORS"] + H --> I["Gadget[] passed to engine"] +``` + +- Built-in capabilities (`fs:*`, `shell:*`, `session:*`) are always available +- Integration capabilities (`pm:*`, `scm:*`, `alerting:*`) require the corresponding integration to be configured for the project +- Optional capabilities degrade gracefully — missing integrations are noted in the system prompt + +## Prompts + +`src/agents/prompts/` + +Agent prompts are built using the [Eta](https://eta.js.org/) template engine. + +### Template context + +The `PromptContext` object passed to templates includes: +- `workItemId`, `workItemUrl`, `workItemTitle` — from trigger result +- `prNumber`, `prUrl`, `prBranch` — for SCM-focused agents +- `projectConfig` — full project configuration +- `agentType` — the running agent type +- `capabilities` — resolved capability list +- `hint` — persistent guidance from definition + +### Prompt partials + +Organizations can customize agent prompts via **prompt partials** — named template fragments stored in the `prompt_partials` database table. Partials are Eta includes (`<%~ include('partialName') %>`) that override default content when a custom version exists. 
+ +Managed via: +- Dashboard: Settings > Prompts +- CLI: `cascade prompts set-partial`, `cascade prompts reset-partial` + +## Hooks + +### Trailing hooks + +Appended to each LLM turn as ephemeral context: + +| Hook | Purpose | +|------|---------| +| `scm.gitStatus` | Current git status (uncommitted changes) | +| `scm.prStatus` | PR state, review status, CI checks | +| `builtin.diagnostics` | TypeScript/lint errors in recently edited files | +| `builtin.todoProgress` | Current todo list progress | +| `builtin.reminder` | Iteration budget and guidance reminders | + +### Finish hooks + +Completion requirements verified before the agent can finish: + +| Hook | Purpose | +|------|---------| +| `scm.requiresPR` | Agent must have created/updated a PR | +| `scm.requiresReview` | Agent must have submitted a review | +| `scm.requiresPushedChanges` | Agent must have pushed commits | + +### Lifecycle hooks + +PM card management during agent execution: + +| Hook | Purpose | +|------|---------| +| `moveOnPrepare` | Move card to status on agent start (e.g., "In Progress") | +| `moveOnSuccess` | Move card to status on success (e.g., "In Review") | +| `linkPR` | Link the created PR to the work item | +| `syncChecklist` | Sync todo list back to PM card checklists | + +## Agent Profiles + +`src/agents/definitions/profiles.ts` + +At runtime, a definition is compiled into an `AgentProfile` — the operational interface used by the execution pipeline: + +- `filterTools(allTools)` — filter available tools based on capabilities +- `allCapabilities` — resolved capability list +- `fetchContext(params)` — run context pipeline steps +- `buildTaskPrompt(input)` — render Eta task prompt template +- `getLlmistGadgets()` — instantiate gadgets for LLMist engine +- `finishHooks` — PR/review/push requirements +- `lifecycleHooks` — PM card movement rules diff --git a/docs/architecture/05-engine-backends.md b/docs/architecture/05-engine-backends.md new file mode 100644 index 00000000..fe638fe8 --- 
/dev/null +++ b/docs/architecture/05-engine-backends.md @@ -0,0 +1,154 @@ +# Engine Backends + +CASCADE abstracts LLM execution behind the `AgentEngine` interface. Multiple engines (Claude Code, LLMist, Codex, OpenCode) implement this interface, and a shared execution adapter orchestrates the full lifecycle around any engine. + +## AgentEngine Interface + +`src/backends/types.ts` + +```typescript +interface AgentEngine { + readonly definition: AgentEngineDefinition; + + execute(plan: AgentExecutionPlan): Promise<AgentEngineResult>; + supportsAgentType(agentType: string): boolean; + + // Optional hooks + resolveModel?(cascadeModel: string): string; + getSettingsSchema?(): ZodType<Record<string, unknown>>; + beforeExecute?(plan: AgentExecutionPlan): Promise<void>; + afterExecute?(plan: AgentExecutionPlan, result: AgentEngineResult): Promise<void>; +} +``` + +### AgentEngineDefinition + +Describes engine capabilities and configuration: + +```typescript +interface AgentEngineDefinition { + readonly id: string; // 'claude-code', 'llmist', 'codex', 'opencode' + readonly label: string; // Display name + readonly archetype: 'sdk' | 'native-tool'; + readonly capabilities: string[]; + readonly modelSelection: { type: 'free-text' } | { type: 'select', options: [...] }; + readonly logLabel: string; + readonly settings?: AgentEngineSettingsDefinition; +} +``` + +### AgentExecutionPlan + +The fully resolved plan passed to `engine.execute()`, combining context, prompts, and policy: + +```typescript +interface AgentExecutionPlan + extends AgentExecutionContext, // repoDir, project, agentInput, logWriter, etc. 
+ AgentPromptSpec, // systemPrompt, taskPrompt, availableTools, contextInjections + AgentEnginePolicy { // maxIterations, model, budgetUsd, engineSettings + cliToolsDir: string; + nativeToolShimDir?: string; + completionRequirements?: CompletionRequirements; +} +``` + +## Two Engine Archetypes + +### `native-tool` — Subprocess-based CLI tools + +Used when the engine runs as an external CLI process with its own built-in file/bash tools. + +**Base class**: `NativeToolEngine` (`src/backends/shared/NativeToolEngine.ts`) + +Provides: +- `buildEngineEnv()` — construct subprocess environment with allowlisted env vars and project secrets +- `resolveModel()` delegation to `resolveEngineModel()` +- `afterExecute()` cleanup for offloaded context files + +**Implementations**: Claude Code (`src/backends/claude-code/`), Codex (`src/backends/codex/`), OpenCode (`src/backends/opencode/`) + +Native-tool engines invoke CASCADE domain tools (PM, SCM, alerting) via the `cascade-tools` CLI binary through Bash commands. File operations use the engine's built-in tools (Read, Write, Edit, Bash, Glob, Grep). + +### `sdk` — In-process SDK integrations + +Used when the engine runs in-process and manages its own LLM API calls. + +**Implementation**: LLMist (`src/backends/llmist/`) + +SDK engines invoke gadgets server-side as synthetic tool calls — the engine calls the gadget function directly and injects the result into the LLM context. + +## Engine Registry + +`src/backends/registry.ts` + +```typescript +function registerEngine(engine: AgentEngine): void; +function getEngine(name: string): AgentEngine; +function getEngineCatalog(): AgentEngineDefinition[]; +``` + +Engines are registered at bootstrap (`src/backends/bootstrap.ts`) before any config loading or webhook processing begins. + +### Engine resolution + +When an agent runs, the engine is resolved in order: +1. Agent-type override (from `agent_configs.agent_engine` for this project + agent type) +2. 
Project-level default (`project.agentEngine.default`) +3. Global fallback: `'claude-code'` + +## Execution Adapter + +`src/backends/adapter.ts` — `executeWithEngine()` + +This is the central orchestration function that wraps every engine call. It handles everything that is common across engines: + +```mermaid +sequenceDiagram + participant C as Caller + participant A as Adapter + participant S as Secret Orchestrator + participant E as Engine + participant D as Database + + C->>A: executeWithEngine(engine, agentType, input) + A->>A: Setup repo directory (clone if needed) + A->>A: Create FileLogger + LogWriter + A->>D: Create run record + A->>S: Build AgentExecutionPlan + S->>S: Resolve model, fetch context, build prompts + S->>S: Resolve project secrets, engine settings + A->>A: Start progress monitor + A->>E: engine.beforeExecute(plan) + A->>E: engine.execute(plan) + E-->>A: AgentEngineResult + A->>E: engine.afterExecute(plan, result) + A->>A: Post-process result (extract PR evidence) + A->>A: Run continuation loop if needed + A->>D: Finalize run record (status, cost, logs) + A->>A: Cleanup (repo deletion, temp files) + A-->>C: AgentResult +``` + +### Key stages + +1. **Repo setup** — Clone repository or use existing working directory +2. **Run record** — Create `agent_runs` database entry with `running` status +3. **Plan building** (`src/backends/secretOrchestrator.ts`) — Resolve model, fetch context injections, build system/task prompts, gather project secrets, merge engine settings +4. **Progress monitoring** (`src/backends/progressMonitor.ts`) — Timer-based progress updates posted to PM card and/or GitHub PR comment +5. **Engine execution** — `beforeExecute()` → `execute()` → `afterExecute()` +6. **Completion verification** (`src/backends/completion.ts`) — Check sidecar files for PR/review/push evidence +7. **Continuation loop** (`src/backends/shared/continuationLoop.ts`) — Re-invoke engine if completion requirements not met +8. 
**Finalization** — Update run record with status, duration, cost, logs; upload logs + +### LLM call logging + +`src/backends/shared/llmCallLogger.ts` + +All LLM requests and responses are logged to the `agent_run_llm_calls` table, tracking: +- Request/response content +- Token counts (input, output, cached) +- Cost (USD) +- Duration +- Tool calls made + +For further details on adding a new engine, see [`docs/adding-engines.md`](../adding-engines.md). diff --git a/docs/architecture/06-integration-layer.md b/docs/architecture/06-integration-layer.md new file mode 100644 index 00000000..8cc8bf4d --- /dev/null +++ b/docs/architecture/06-integration-layer.md @@ -0,0 +1,173 @@ +# Integration Layer + +CASCADE uses a unified integration abstraction so that infrastructure code (router, worker, webhook handlers) never branches on provider type. Every PM, SCM, and alerting provider is a class implementing `IntegrationModule`, registered into a singleton `IntegrationRegistry` at bootstrap. + +## IntegrationModule + +`src/integrations/types.ts` + +The base contract for all integrations: + +```typescript +interface IntegrationModule { + readonly type: string; // 'trello', 'jira', 'github', 'sentry' + readonly category: IntegrationCategory; // 'pm' | 'scm' | 'alerting' + + withCredentials<T>(projectId: string, fn: () => Promise<T>): Promise<T>; + hasIntegration(projectId: string): Promise<boolean>; + + // Optional webhook methods + parseWebhookPayload?(raw: unknown): IntegrationWebhookEvent | null; + isSelfAuthored?(event: unknown, projectId: string): Promise<boolean>; + lookupProject?(identifier: string): Promise<{ project; config } | null>; + extractWorkItemId?(text: string): string | null; +} +``` + +### Credential scoping + +`withCredentials()` uses `AsyncLocalStorage` to set provider-specific env vars for the duration of a callback, then restores the original values. This provides per-request credential isolation without global state mutation. 
+ +### Integration checking + +`hasIntegration()` checks that all required credential roles for the provider are configured for the given project. Role definitions come from `src/config/integrationRoles.ts`. + +## IntegrationRegistry + +`src/integrations/registry.ts` + +```typescript +class IntegrationRegistry { + register(integration: IntegrationModule): void; + get(type: string): IntegrationModule; // throws if missing + getOrNull(type: string): IntegrationModule | null; + getByCategory(category: IntegrationCategory): IntegrationModule[]; + all(): IntegrationModule[]; +} + +const integrationRegistry: IntegrationRegistry; // singleton +``` + +## Category Interfaces + +### PMIntegration + +`src/pm/integration.ts` — extends `IntegrationModule` with PM-specific methods: + +- `createProvider(project)` — create a `PMProvider` instance for CRUD operations +- `resolveLifecycleConfig(project)` — extract labels, statuses, list IDs from project config +- `postAckComment(projectId, workItemId, message)` — post acknowledgment comment +- `deleteAckComment(projectId, workItemId, commentId)` — remove ack comment +- `sendReaction(projectId, event)` — add emoji reaction to source event +- `lookupProject(identifier)` — find project by board ID or project key +- `extractWorkItemId(text)` — parse work item ID from text (e.g., Trello URL, JIRA key) + +### SCMIntegration + +`src/integrations/scm.ts` — extends `IntegrationModule` with SCM-specific methods for webhook payload parsing and project lookup by repository name. + +### AlertingIntegration + +`src/integrations/alerting.ts` — extends `IntegrationModule` with alerting-specific methods. + +## Bootstrap + +`src/integrations/bootstrap.ts` + +Single, idempotent registration point for all four built-in integrations. Safe to import from router, worker, and dashboard — it does not pull in the agent execution pipeline or template files. 
+ +``` +TrelloIntegration → integrationRegistry + pmRegistry +JiraIntegration → integrationRegistry + pmRegistry +GitHubSCMIntegration → integrationRegistry +SentryAlertingIntegration → integrationRegistry +``` + +## Credential Roles + +`src/config/integrationRoles.ts` + +Each provider declares its credential roles — the mapping from logical role names to environment variable keys: + +| Provider | Category | Required Roles | Optional Roles | +|----------|----------|---------------|----------------| +| Trello | pm | `api_key` → `TRELLO_API_KEY`, `token` → `TRELLO_TOKEN` | `api_secret` | +| JIRA | pm | `email` → `JIRA_EMAIL`, `api_token` → `JIRA_API_TOKEN` | `webhook_secret` | +| GitHub | scm | `implementer_token` → `GITHUB_TOKEN_IMPLEMENTER`, `reviewer_token` → `GITHUB_TOKEN_REVIEWER` | `webhook_secret` | +| Sentry | alerting | `api_token` → `SENTRY_API_TOKEN` | `webhook_secret` | + +## Provider Implementations + +### Trello (`src/pm/trello/`, `src/trello/`) + +- `TrelloIntegration` implements `PMIntegration` +- `TrelloPMProvider` implements `PMProvider` (card CRUD, comments, labels, checklists) +- `trelloClient` — Octokit-style client with AsyncLocalStorage credential scoping +- Media extraction from markdown in card descriptions/comments +- Status = list ID (cards grouped by lists) + +### JIRA (`src/pm/jira/`, `src/jira/`) + +- `JiraIntegration` implements `PMIntegration` +- `JiraPMProvider` implements `PMProvider` (issue CRUD, transitions, comments) +- `jiraClient` — wraps `jira.js` Version3Client with AsyncLocalStorage scoping +- ADF (Atlassian Document Format) ↔ markdown conversion (`src/pm/jira/adf.ts`) +- Status transitions via JIRA transition ID lookup +- Issue key extraction via regex: `[A-Z][A-Z0-9]+-\d+` + +### GitHub (`src/github/`) + +- `GitHubSCMIntegration` implements `SCMIntegration` +- `githubClient` — Octokit wrapper with `withGitHubToken()` AsyncLocalStorage scoping +- **Dual-persona model** (`src/github/personas.ts`): + - **Implementer** — writes 
code, creates PRs (used by most agents) + - **Reviewer** — reviews PRs, can approve or request changes (used by `review` agent) + - `isCascadeBot(login)` — checks if a GitHub login belongs to either persona + - `resolvePersonaIdentities()` — resolves both tokens to usernames (cached 60s per project) +- Loop prevention: `respond-to-review` only fires on reviewer's `changes_requested`; comment triggers skip @mentions from any known persona + +### Sentry (`src/sentry/`) + +- `SentryAlertingIntegration` implements `AlertingIntegration` +- `sentryClient` — REST API client with Bearer token auth +- Supports issue alerts, metric alerts, and issue lifecycle webhooks +- Config: `organizationSlug` stored in `project_integrations.config` JSONB + +## PM Abstraction + +`src/pm/` + +### PMProvider interface + +Lower-level data operations consumed by gadgets and lifecycle hooks: + +```typescript +interface PMProvider { + getWorkItem(id: string): Promise<WorkItem>; + listWorkItems(filter?): Promise<WorkItem[]>; + createWorkItem(config): Promise<WorkItem>; + updateWorkItem(id, updates): Promise<void>; + moveToStatus(id, status): Promise<void>; + addComment(id, text): Promise<void>; + getChecklists(id): Promise<Checklist[]>; + addLabel(id, label): Promise<void>; + removeLabel(id, label): Promise<void>; + linkPR(id, prUrl): Promise<void>; + // ... more operations +} +``` + +### PMRegistry + +`src/pm/registry.ts` — backward-compatible PM-specific registry. Maps PM type to integration instance. Used by trigger handlers and gadgets that need PM operations. + +### PM Lifecycle Manager + +`src/pm/lifecycle.ts` — orchestrates card/issue state during agent execution: + +- `prepareForAgent()` — add processing label, move to "In Progress" +- `handleSuccess()` — add processed label, move to "In Review", link PR +- `handleFailure()` — add error label, post error comment +- `cleanupProcessing()` — remove processing label + +For the complete step-by-step guide to adding a new integration, see [`src/integrations/README.md`](../../src/integrations/README.md). 
diff --git a/docs/architecture/07-gadgets.md b/docs/architecture/07-gadgets.md new file mode 100644 index 00000000..3d1ab228 --- /dev/null +++ b/docs/architecture/07-gadgets.md @@ -0,0 +1,119 @@ +# Gadgets + +Gadgets are the tool implementations that agents use to interact with their environment. They are the concrete operations behind capabilities — when an agent definition declares `fs:write`, the capability registry maps that to gadgets like `WriteFile`, `FileSearchAndReplace`, and `FileMultiEdit`. + +## Capability-to-Gadget Mapping + +The `CAPABILITY_REGISTRY` in `src/agents/capabilities/registry.ts` is the single source of truth: + +``` +Agent YAML definition + → capabilities.required + optional + → CAPABILITY_REGISTRY lookup + → gadgetNames[] per capability + → GADGET_CONSTRUCTORS instantiation + → Gadget[] passed to engine +``` + +For **SDK engines** (LLMist): gadgets are instantiated as server-side classes and invoked directly when the LLM makes a tool call. + +For **native-tool engines** (Claude Code, Codex, OpenCode): the engine uses its own built-in tools for file/shell operations. Domain tools (PM, SCM, alerting) are invoked via the `cascade-tools` CLI binary through Bash commands. + +## Built-in Gadgets + +### File system (`fs:read`, `fs:write`) + +| Gadget | Capability | Purpose | +|--------|-----------|---------| +| `ListDirectory` | `fs:read` | List directory contents | +| `ReadFile` | `fs:read` | Read file contents | +| `RipGrep` | `fs:read` | Regex code search | +| `AstGrep` | `fs:read` | AST-based code search | +| `WriteFile` | `fs:write` | Write file contents | +| `FileSearchAndReplace` | `fs:write` | Search and replace in files | +| `FileMultiEdit` | `fs:write` | Multiple edits in a single file | +| `VerifyChanges` | `fs:write` | Verify edits produce expected results | + +All file gadgets validate paths against allowed directories (working directory + `/tmp`). Write gadgets run post-edit diagnostics to catch syntax errors immediately. 
+ +### Shell (`shell:exec`) + +| Gadget | Capability | Purpose | +|--------|-----------|---------| +| `Tmux` | `shell:exec` | Execute shell commands in a tmux session | +| `Sleep` | `shell:exec` | Wait for a specified duration | + +### Session (`session:ctrl`) + +| Gadget | Capability | Purpose | +|--------|-----------|---------| +| `Finish` | `session:ctrl` | Signal task completion | +| `TodoUpsert` | `session:ctrl` | Create or update a todo item | +| `TodoUpdateStatus` | `session:ctrl` | Mark todo as pending/in_progress/done | +| `TodoDelete` | `session:ctrl` | Remove a todo item | + +Todos are stored in `.claude/todos.json` within the repo working directory. + +### PM (`pm:read`, `pm:write`, `pm:checklist`) + +| Gadget | Capability | Purpose | +|--------|-----------|---------| +| `ReadWorkItem` | `pm:read` | Fetch work item details | +| `ListWorkItems` | `pm:read` | List work items with filters | +| `UpdateWorkItem` | `pm:write` | Update work item fields | +| `CreateWorkItem` | `pm:write` | Create new work item | +| `MoveWorkItem` | `pm:write` | Move work item to a status/list | +| `PostComment` | `pm:write` | Post comment on work item | +| `AddChecklist` | `pm:write` | Add checklist to work item | +| `PMUpdateChecklistItem` | `pm:checklist` | Update checklist item status | +| `PMDeleteChecklistItem` | `pm:checklist` | Delete checklist item | + +PM gadgets use the active `PMProvider` from `AsyncLocalStorage` context, making them provider-agnostic. 
+ +### SCM (`scm:read`, `scm:ci-logs`, `scm:comment`, `scm:review`, `scm:pr`) + +| Gadget | Capability | Purpose | +|--------|-----------|---------| +| `GetPRDetails` | `scm:read` | Fetch PR metadata and state | +| `GetPRDiff` | `scm:read` | Get PR diff (additions/deletions) | +| `GetPRChecks` | `scm:read` | Get CI check status | +| `GetCIRunLogs` | `scm:ci-logs` | Download failed CI job logs | +| `PostPRComment` | `scm:comment` | Post issue comment on PR | +| `UpdatePRComment` | `scm:comment` | Update existing comment | +| `GetPRComments` | `scm:comment` | List PR comments | +| `ReplyToReviewComment` | `scm:comment` | Reply to inline review comment | +| `CreatePRReview` | `scm:review` | Submit code review | +| `CreatePR` | `scm:pr` | Create pull request | + +### Alerting (`alerting:read`) + +| Gadget | Capability | Purpose | +|--------|-----------|---------| +| `GetAlertingIssue` | `alerting:read` | Fetch Sentry issue details | +| `GetAlertingEventDetail` | `alerting:read` | Fetch specific event with stacktrace | +| `ListAlertingEvents` | `alerting:read` | List recent events for an issue | + +## cascade-tools CLI + +`src/cli/` — the `cascade-tools` binary + +Native-tool engines cannot invoke gadget classes directly (they run as subprocesses). Instead, they call `cascade-tools` via Bash commands. The CLI is organized by category: + +| Category | Commands | Example | +|----------|----------|---------| +| PM | `cascade-tools pm read-card`, `list-cards`, `update-card`, etc. | `cascade-tools pm read-card --cardId=abc123 --raw-json` | +| SCM | `cascade-tools github get-pr-details`, `get-diff`, `post-comment`, etc. | `cascade-tools github get-pr-details --pr-number=42` | +| Alerting | `cascade-tools sentry get-issue`, `list-events`, etc. | `cascade-tools sentry get-issue --issue-id=12345` | +| Session | `cascade-tools session todo-upsert`, `todo-status`, etc. 
| `cascade-tools session todo-upsert --id=1 --title="Fix tests"` | + +The `cascade-tools` binary uses a separate oclif config (`bin/cascade-tools.js`) that discovers all non-dashboard commands, while `cascade` discovers only dashboard commands. + +## Session State + +`src/gadgets/sessionState.ts` + +Gadgets communicate session-level state via a shared `SessionState` object: +- Progress comment ID (for updating in-place ack comments) +- GitHub auth mode (which persona is active) +- Read tracking — which files have been read (avoids re-reads) +- Edited files tracking — for post-edit diagnostics diff --git a/docs/architecture/08-config-credentials.md b/docs/architecture/08-config-credentials.md new file mode 100644 index 00000000..700548c5 --- /dev/null +++ b/docs/architecture/08-config-credentials.md @@ -0,0 +1,153 @@ +# Configuration and Credentials + +CASCADE stores all project configuration in PostgreSQL. There are no config files read at runtime — the database is the sole source of truth. + +## Config Provider + +`src/config/provider.ts` + +The config provider loads project configuration from the database with in-memory caching. + +### Loading functions + +| Function | Lookup key | Returns | +|----------|-----------|---------| +| `loadConfig()` | All projects | `CascadeConfig` (all projects in org) | +| `loadProjectConfigByBoardId(boardId)` | Trello board ID | `{ project, config }` | +| `loadProjectConfigByRepo(repo)` | GitHub `owner/repo` | `{ project, config }` | +| `loadProjectConfigByJiraProjectKey(key)` | JIRA project key | `{ project, config }` | +| `loadProjectConfigById(id)` | CASCADE project ID | `{ project, config }` | + +### Caching + +`src/config/configCache.ts` — in-memory cache with TTL populated at service startup. Caches: +- Full config object +- Per-project lookups by board ID, repo, JIRA key +- Invalidated on config writes (via tRPC mutations) + +## Config Schema + +`src/config/schema.ts` + +Project configuration is validated with Zod schemas. 
Key fields: + +```typescript +interface ProjectConfig { + id: string; + orgId: string; + name: string; + repo?: string; // GitHub owner/repo + baseBranch: string; // default: 'main' + branchPrefix: string; // default: 'feature/' + model: string; // LLM model identifier + maxIterations: number; // default: 50 + watchdogTimeoutMs: number; // default: 30 min + workItemBudgetUsd: number; // default: $5 + progressModel: string; + progressIntervalMinutes: number; // default: 5 + agentEngine?: { default: string; overrides: Record<string, string> }; + engineSettings?: EngineSettings; + agentEngineSettings?: Record<string, EngineSettings>; + runLinksEnabled: boolean; + maxInFlightItems?: number; + // ... PM config (trello/jira), agent models, snapshot settings +} +``` + +## Credential Resolution + +CASCADE uses a two-tier credential resolution system, selecting the appropriate resolver based on execution context. + +### Router / Dashboard context + +Uses `DbCredentialResolver` — reads credentials from the `project_credentials` database table: + +```typescript +getIntegrationCredential(projectId, category, role) // e.g., ('proj1', 'pm', 'api_key') +getAllProjectCredentials(projectId) // All credentials as env-var-key map +``` + +### Worker context + +Uses `EnvCredentialResolver` — reads from `process.env` (pre-loaded by the router's `worker-env.ts`): + +The router builds the worker's environment by: +1. Loading all project credentials from the database +2. Setting them as individual env vars on the Docker container +3. Setting `CASCADE_CREDENTIAL_KEYS` with a comma-separated list of the env var names + +When the worker starts, it detects `CASCADE_CREDENTIAL_KEYS` and uses `EnvCredentialResolver` instead of hitting the database. 
+ +### Auto-selection + +```typescript +// If CASCADE_CREDENTIAL_KEYS is set → worker context (env resolver) +// Otherwise → router/dashboard context (DB resolver) +``` + +### AsyncLocalStorage scoping + +Provider clients use `AsyncLocalStorage` for per-request credential isolation: + +```typescript +// GitHub +await withGitHubToken(token, async () => { + // All GitHub API calls in this scope use this token +}); + +// Trello +await withTrelloCredentials({ apiKey, token }, async () => { + // All Trello API calls use these credentials +}); + +// JIRA +await withJiraCredentials({ email, apiToken, baseUrl }, async () => { + // All JIRA API calls use these credentials +}); +``` + +## Credential Encryption + +`src/db/crypto.ts` + +When `CREDENTIAL_MASTER_KEY` is set (64-char hex string = 32-byte AES-256 key), credentials are encrypted at rest. + +- **Algorithm**: AES-256-GCM with 12-byte random IV and 16-byte auth tag +- **AAD**: `projectId` (additional authenticated data) +- **Storage format**: `enc:v1:::` +- **Transparent**: `writeProjectCredential()` encrypts before DB write; read functions decrypt automatically +- **Opt-in**: Without the env var, credentials are stored and read as plaintext + +### Key management + +```bash +npm run credentials:generate-key # Generate new 32-byte key +npm run credentials:encrypt # Encrypt all existing plaintext credentials +npm run credentials:decrypt # Rollback to plaintext +npm run credentials:rotate-key # Re-encrypt with CREDENTIAL_MASTER_KEY_NEW +``` + +## Integration Roles + +`src/config/integrationRoles.ts` + +Maps provider → category → credential roles. Each role maps a logical name to an env var key: + +```typescript +registerCredentialRoles('trello', 'pm', [ + { role: 'api_key', label: 'API Key', envVarKey: 'TRELLO_API_KEY' }, + { role: 'token', label: 'Token', envVarKey: 'TRELLO_TOKEN' }, +]); +``` + +`hasIntegration()` returns `true` only if all non-optional roles have values stored. 
+ +## Engine Settings + +`src/config/engineSettings.ts` + +Per-engine configuration schemas registered dynamically at bootstrap. Settings are merged at execution time: +1. Project-level `engineSettings` (base) +2. Agent-config-level `agentEngineSettings[agentType]` (override) + +Each engine optionally provides a `getSettingsSchema()` method that returns a Zod schema, registered via `registerEngineSettingsSchema()`. diff --git a/docs/architecture/09-database.md b/docs/architecture/09-database.md new file mode 100644 index 00000000..55ba8f08 --- /dev/null +++ b/docs/architecture/09-database.md @@ -0,0 +1,197 @@ +# Database + +CASCADE uses PostgreSQL with [Drizzle ORM](https://orm.drizzle.team/) for type-safe database access. All data access goes through repository modules — no raw SQL in application code. + +## Schema + +`src/db/schema/` + +```mermaid +erDiagram + organizations ||--o{ projects : "has" + organizations ||--o{ users : "has" + organizations ||--o{ prompt_partials : "has" + + projects ||--o{ project_integrations : "has" + projects ||--o{ project_credentials : "has" + projects ||--o{ agent_configs : "has" + projects ||--o{ agent_definitions : "has" + projects ||--o{ agent_trigger_configs : "has" + projects ||--o{ agent_runs : "tracks" + projects ||--o{ pr_work_items : "maps" + + agent_runs ||--o| agent_run_logs : "has" + agent_runs ||--o{ agent_run_llm_calls : "logs" + agent_runs ||--o| debug_analyses : "analyzed by" + + users ||--o{ sessions : "has" + + organizations { + text id PK + text name + jsonb settings + } + + projects { + text id PK + text org_id FK + text name + text repo + text base_branch + text model + integer max_iterations + integer watchdog_timeout_ms + numeric work_item_budget_usd + jsonb agent_engine + jsonb engine_settings + } + + project_integrations { + uuid id PK + text project_id FK + text category + text provider + jsonb config + jsonb triggers + } + + project_credentials { + uuid id PK + text project_id FK + text env_var_key + 
text value + } + + agent_configs { + uuid id PK + text project_id FK + text agent_type + text model + integer max_iterations + text agent_engine + jsonb agent_engine_settings + integer max_concurrency + text system_prompt + text task_prompt + } + + agent_trigger_configs { + uuid id PK + text project_id FK + text agent_type + text event + boolean enabled + jsonb parameters + } + + agent_runs { + uuid id PK + text project_id FK + text agent_type + text status + text model + integer llm_iterations + integer gadget_calls + numeric cost_usd + integer duration_ms + text pr_url + text work_item_id + text error + } + + agent_run_logs { + uuid id PK + uuid run_id FK + text cascade_log + text engine_log + } + + agent_run_llm_calls { + uuid id PK + uuid run_id FK + integer call_number + jsonb request + jsonb response + integer input_tokens + integer output_tokens + numeric cost_usd + integer duration_ms + } +``` + +### Key tables + +| Table | Purpose | Key constraints | +|-------|---------|-----------------| +| `organizations` | Multi-tenant organization definitions | — | +| `projects` | Per-project config (repo, model, budget, engine) | `repo` UNIQUE | +| `project_integrations` | Integration configs with category/provider | UNIQUE(`project_id`, `category`) | +| `project_credentials` | Encrypted credentials keyed by env var name | UNIQUE(`project_id`, `env_var_key`) | +| `agent_configs` | Per-agent-type overrides per project | UNIQUE(`project_id`, `agent_type`), `project_id NOT NULL` | +| `agent_definitions` | Agent YAML definitions (built-in + custom) | UNIQUE(`agent_type`) | +| `agent_trigger_configs` | Trigger enable/disable + parameters per project/agent/event | UNIQUE(`project_id`, `agent_type`, `event`) | +| `agent_runs` | Agent execution records with status, cost, duration | Indexed on `project_id`, `status`, `started_at` | +| `agent_run_logs` | Cascade log + engine log per run | One-to-one with `agent_runs` | +| `agent_run_llm_calls` | LLM request/response pairs with 
token/cost tracking | — | +| `prompt_partials` | Org-scoped prompt template customizations | UNIQUE(`org_id`, `name`) | +| `pr_work_items` | Maps PRs to work items for run-link display | — | +| `webhook_logs` | Raw webhook payloads for debugging | — | +| `users` | Dashboard users (email, bcrypt hash, role) | Org-scoped | +| `sessions` | Session tokens for cookie auth (30-day expiry) | — | +| `debug_analyses` | AI debug analysis results | — | + +## Repositories + +`src/db/repositories/` + +Each table has a dedicated repository providing typed query methods. Key repositories: + +| Repository | Purpose | +|------------|---------| +| `configRepository` | Load full project config from DB, merge integrations + credentials | +| `configMapper` | Transform raw DB rows to typed `ProjectConfig` objects | +| `credentialsRepository` | Credential CRUD with transparent encryption/decryption | +| `runsRepository` | Run lifecycle (create, update status, query by project/status) | +| `runLogsRepository` | Store and retrieve cascade + engine logs | +| `llmCallsRepository` | Log and query LLM request/response pairs | +| `agentConfigsRepository` | Per-agent settings CRUD | +| `agentDefinitionsRepository` | Agent definition CRUD (YAML ↔ JSONB) | +| `agentTriggerConfigsRepository` | Trigger enable/disable/params per project/agent/event | +| `integrationsRepository` | Query integration configuration | +| `projectsRepository` | Project CRUD | +| `organizationsRepository` | Organization CRUD | +| `usersRepository` | User management | +| `partialsRepository` | Prompt partial CRUD | +| `prWorkItemsRepository` | PR ↔ work item mapping | +| `webhookLogsRepository` | Webhook audit trail | +| `debugAnalysisRepository` | Debug analysis results | + +## Connection Management + +`src/db/client.ts` + +- `DatabaseContext` class wraps Drizzle instance + `pg.Pool` +- `getDb()` returns a singleton, lazily initialized from `DATABASE_URL` +- SSL support with optional CA certificate (`DATABASE_CA_CERT`) +- 
In workers, the DB connection is initialized eagerly (before env scrub removes `DATABASE_URL`) + +## Migrations + +Migrations are hand-written SQL files in `src/db/migrations/`, tracked by drizzle-kit's journal (`meta/_journal.json`). + +### Adding a migration + +1. Create `src/db/migrations/NNNN_description.sql` +2. Add entry to `src/db/migrations/meta/_journal.json` with unique `when` timestamp and `tag` matching filename +3. Run `npm run db:migrate` + +### Scripts + +| Command | Purpose | +|---------|---------| +| `npm run db:migrate` | Apply pending migrations | +| `npm run db:generate` | Generate migration SQL from schema changes | +| `npm run db:push` | Push schema directly (dev only) | +| `npm run db:studio` | Open Drizzle Studio | +| `npm run db:seed` | Seed from `config/projects.json` | +| `npm run db:bootstrap-journal` | Register existing migrations (one-time for `push`-initialized DBs) | diff --git a/docs/architecture/10-resilience.md b/docs/architecture/10-resilience.md new file mode 100644 index 00000000..8123e192 --- /dev/null +++ b/docs/architecture/10-resilience.md @@ -0,0 +1,141 @@ +# Resilience + +CASCADE runs long-lived agent sessions (up to 30+ minutes) against external LLM APIs. The resilience layer ensures reliable operation through watchdog timers, concurrency controls, rate limiting, retry strategies, and loop prevention. 
+ +## Watchdog + +`src/utils/lifecycle.ts` + +Each worker container has a configurable watchdog timer that force-exits the process if the agent exceeds its timeout: + +- **Timeout**: Configurable per project via `watchdogTimeoutMs` (default: 30 minutes) +- **Cleanup**: A cleanup callback is registered via `setWatchdogCleanup()` and called before force exit (with a 10-second cap) +- **Router-side buffer**: The router's worker manager adds a 2-minute buffer on top of the worker watchdog before considering a container orphaned + +```typescript +startWatchdog(timeoutMs, () => { + // cleanup callback: finalize run record, upload logs +}); +``` + +## Concurrency Controls + +### Work-item lock + +`src/router/work-item-lock.ts` + +Prevents multiple agents from working on the same card/issue simultaneously. The lock is in-memory (router process) with TTL expiry. + +- Checked at webhook processing time (step 8 of the pipeline) +- Marked when job is enqueued, cleared when worker completes +- Key: `(projectId, workItemId, agentType)` + +### Agent-type concurrency limit + +`src/router/agent-type-lock.ts` + +Configurable `max_concurrency` per agent type per project (set via `agent_configs.max_concurrency`). Prevents too many instances of the same agent type running simultaneously. + +- Tracks enqueued + running counts +- Blocks new jobs when limit reached +- Includes a "recently dispatched" window to prevent race conditions between enqueueing and worker startup + +### Max in-flight items + +`projects.max_in_flight_items` — project-level cap on total concurrent agent runs. Checked during trigger dispatch. + +### BullMQ concurrency + +The router's worker manager limits how many Docker containers run in parallel via `routerConfig.maxWorkers`. + +## Rate Limiting + +`src/config/rateLimits.ts` + +Proactive, model-specific rate limits prevent hitting LLM provider quotas. 
Configured per model with safety margins (80-90% of actual limits): + +- **RPM** (requests per minute) +- **TPM** (tokens per minute) +- **Daily token limit** + +Rate limits are enforced by the LLMist SDK for `sdk`-archetype engines. Native-tool engines (Claude Code, Codex) handle rate limiting internally. + +## Retry Strategy + +`src/config/retryConfig.ts` + +Handles transient LLM API failures: + +- **5 retry attempts** with exponential backoff (1s base, 60s max) +- **Jitter** randomization prevents thundering herd +- **Respects `Retry-After` headers** (capped at 2 minutes) +- **Custom detection** for undici/fetch stream termination errors +- **Logging** and Sentry breadcrumbs on each retry and exhaustion + +Retries cover: HTTP 429 (rate limit), 5xx (server errors), timeouts, and connection failures. + +## Context Compaction + +`src/config/compactionConfig.ts` + +Prevents context window overflow during long-running agent sessions: + +- **Trigger**: 80% context usage +- **Target**: Reduce to 50% +- **Preserve**: 5 most recent turns +- **Strategy**: Hybrid summarization + sliding window +- Summarization preserves: task goals, key decisions, discovered facts, errors, and failed approaches (to avoid repeating them) +- Clears read-tracking state after compaction + +## Iteration Hints + +`src/config/hintConfig.ts` + +Ephemeral trailing messages showing the agent its iteration budget: + +- Displayed at configurable thresholds +- Urgency warnings at >80%: "ITERATION BUDGET: 17/20 - Only 3 remaining!" +- Helps the LLM prioritize and wrap up before hitting limits + +## Loop Prevention + +### Bot identity detection + +`src/github/personas.ts` — `isCascadeBot(login)` + +Both GitHub persona usernames (implementer + reviewer) are resolved and cached. 
Event handlers check if the event author is a known persona to prevent self-triggered loops: + +- `respond-to-review` only fires when the **reviewer** persona submits `changes_requested` +- `respond-to-pr-comment` skips @mentions from **any** known persona +- Trello/JIRA handlers check their bot member/account IDs similarly + +### Self-authored event filtering + +Each `RouterPlatformAdapter.isSelfAuthored()` checks the webhook payload author against known bot identities. Self-authored events are logged and discarded at step 4 of the webhook pipeline. + +## Security + +### Environment scrubbing + +`src/utils/envScrub.ts` — `scrubSensitiveEnv()` + +After the worker initializes its DB connection and caches config, sensitive env vars (`DATABASE_URL`, master keys) are removed from `process.env`. This prevents LLM-generated shell commands (executed by agents) from accessing database credentials. + +### Credential encryption at rest + +See [08-config-credentials](./08-config-credentials.md) — AES-256-GCM encryption with transparent encrypt/decrypt. + +## Orphan Cleanup + +`src/router/orphan-cleanup.ts` + +Periodic scan for Docker containers that outlived their expected lifetime (watchdog timeout + buffer). Orphans are killed and their run records marked as failed. 
+ +## Snapshot Management + +`src/router/snapshot-manager.ts`, `src/router/snapshot-cleanup.ts` + +Optional container snapshots for warm restarts: +- After a worker completes, its container state can be snapshotted +- Subsequent runs for the same project reuse the snapshot (faster startup, cached dependencies) +- Snapshots have a configurable TTL (`snapshotTtlMs`) and are cleaned up periodically diff --git a/tests/unit/architecture-docs.test.ts b/tests/unit/architecture-docs.test.ts new file mode 100644 index 00000000..037eef5e --- /dev/null +++ b/tests/unit/architecture-docs.test.ts @@ -0,0 +1,163 @@ +import { existsSync, readFileSync } from 'node:fs'; +import path from 'node:path'; + +const DOCS_ROOT = path.resolve(__dirname, '../../docs'); +const ARCH_DIR = path.join(DOCS_ROOT, 'architecture'); + +function readDoc(filePath: string): string { + return readFileSync(filePath, 'utf-8'); +} + +function extractMarkdownLinks(content: string): string[] { + const linkPattern = /\[.*?\]\((\.\.?\/[^)]+\.md)\)/g; + return Array.from(content.matchAll(linkPattern), (m) => m[1]); +} + +describe('Architecture documentation', () => { + describe('hub document (ARCHITECTURE.md)', () => { + const hubPath = path.join(DOCS_ROOT, 'ARCHITECTURE.md'); + + it('exists', () => { + expect(existsSync(hubPath)).toBe(true); + }); + + it('contains expected sections', () => { + const content = readDoc(hubPath); + const expectedSections = [ + 'System Overview', + 'Service Topology', + 'End-to-End Request Flow', + 'Architectural Patterns', + 'Directory Map', + 'Deep-Dive Documents', + ]; + for (const section of expectedSections) { + expect(content).toContain(section); + } + }); + + it('contains mermaid diagrams', () => { + const content = readDoc(hubPath); + expect(content).toContain('```mermaid'); + }); + + it('links to all 10 deep-dive documents', () => { + const content = readDoc(hubPath); + const deepDiveFiles = [ + '01-services.md', + '02-webhook-pipeline.md', + '03-trigger-system.md', + 
'04-agent-system.md', + '05-engine-backends.md', + '06-integration-layer.md', + '07-gadgets.md', + '08-config-credentials.md', + '09-database.md', + '10-resilience.md', + ]; + for (const file of deepDiveFiles) { + expect(content).toContain(file); + } + }); + }); + + const deepDiveDocuments = [ + { + file: '01-services.md', + expectedHeading: 'Services and Deployment', + expectedSections: ['Router', 'Worker', 'Dashboard'], + }, + { + file: '02-webhook-pipeline.md', + expectedHeading: 'Webhook Pipeline', + expectedSections: ['Webhook Handler Factory', 'Platform Adapters'], + }, + { + file: '03-trigger-system.md', + expectedHeading: 'Trigger System', + expectedSections: ['TriggerRegistry', 'TriggerHandler', 'Built-in Triggers'], + }, + { + file: '04-agent-system.md', + expectedHeading: 'Agent System', + expectedSections: ['Agent Definitions', 'Capabilities', 'Prompts'], + }, + { + file: '05-engine-backends.md', + expectedHeading: 'Engine Backends', + expectedSections: ['AgentEngine Interface', 'Execution Adapter'], + }, + { + file: '06-integration-layer.md', + expectedHeading: 'Integration Layer', + expectedSections: ['IntegrationModule', 'IntegrationRegistry'], + }, + { + file: '07-gadgets.md', + expectedHeading: 'Gadgets', + expectedSections: ['Capability-to-Gadget Mapping', 'Built-in Gadgets'], + }, + { + file: '08-config-credentials.md', + expectedHeading: 'Configuration and Credentials', + expectedSections: ['Config Provider', 'Credential Resolution'], + }, + { + file: '09-database.md', + expectedHeading: 'Database', + expectedSections: ['Schema', 'Repositories'], + }, + { + file: '10-resilience.md', + expectedHeading: 'Resilience', + expectedSections: ['Watchdog', 'Concurrency Controls'], + }, + ]; + + describe.each(deepDiveDocuments)('$file', ({ file, expectedHeading, expectedSections }) => { + const filePath = path.join(ARCH_DIR, file); + + it('exists', () => { + expect(existsSync(filePath)).toBe(true); + }); + + it(`contains heading: ${expectedHeading}`, () 
=> { + const content = readDoc(filePath); + expect(content).toContain(expectedHeading); + }); + + it('contains expected sections', () => { + const content = readDoc(filePath); + for (const section of expectedSections) { + expect(content).toContain(section); + } + }); + }); + + describe('cross-references', () => { + it('all relative .md links in hub document resolve to existing files', () => { + const hubPath = path.join(DOCS_ROOT, 'ARCHITECTURE.md'); + const content = readDoc(hubPath); + const links = extractMarkdownLinks(content); + + expect(links.length).toBeGreaterThan(0); + for (const link of links) { + const resolved = path.resolve(DOCS_ROOT, link); + expect(existsSync(resolved)).toBe(true); + } + }); + + it('all relative .md links in deep-dive documents resolve to existing files', () => { + for (const { file } of deepDiveDocuments) { + const filePath = path.join(ARCH_DIR, file); + if (!existsSync(filePath)) continue; + const content = readDoc(filePath); + const links = extractMarkdownLinks(content); + for (const link of links) { + const resolved = path.resolve(ARCH_DIR, link); + expect(existsSync(resolved)).toBe(true); + } + } + }); + }); +}); diff --git a/web/src/components/projects/project-harness-form.tsx b/web/src/components/projects/project-harness-form.tsx index 31f3e085..e6370e0a 100644 --- a/web/src/components/projects/project-harness-form.tsx +++ b/web/src/components/projects/project-harness-form.tsx @@ -42,7 +42,6 @@ function capitalize(s: string): string { return s.charAt(0).toUpperCase() + s.slice(1); } -// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: multiple query dependencies and per-engine tab rendering for credentials and settings export function ProjectHarnessForm({ project }: { project: Project }) { const updateMutation = useProjectUpdate(project.id); const enginesQuery = useQuery(trpc.agentConfigs.engines.queryOptions()); From 597ec169e2d96915beec0af823bd915d865c39e5 Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 
2026 09:57:19 +0200 Subject: [PATCH 02/52] feat(crypto): validate CREDENTIAL_MASTER_KEY format at startup (#1071) * feat(crypto): validate CREDENTIAL_MASTER_KEY format at startup * fix(deps): resolve high-severity lodash-es vulnerability via overrides Add npm overrides to force lodash-es>=4.18.1, resolving GHSA-r5fr-rjxr-66jc (Code Injection via _.template) and GHSA-f23m-r3pf-42rh (Prototype Pollution) which affected transitive deps chevrotain/js-toml/llmist. Also updates brace-expansion to 2.0.3 to resolve moderate GHSA-f886-m6hf-6m8v. Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- package-lock.json | 30 +++---- package.json | 3 + src/dashboard.ts | 7 ++ src/db/crypto.ts | 23 ++++++ src/router/index.ts | 7 ++ tests/unit/db/crypto.test.ts | 49 +++++++++++ tests/unit/router/startup-validation.test.ts | 86 ++++++++++++++++++++ 7 files changed, 191 insertions(+), 14 deletions(-) create mode 100644 tests/unit/router/startup-validation.test.ts diff --git a/package-lock.json b/package-lock.json index 48db4d53..d9eab24e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2391,9 +2391,9 @@ } }, "node_modules/@llmist/cli": { - "version": "16.0.3", - "resolved": "https://registry.npmjs.org/@llmist/cli/-/cli-16.0.3.tgz", - "integrity": "sha512-t45mK7foJpNyvmff2P7CwyN5Cw3Hd5shJjZB83THwGEsX69Nb1Q9NCLxnWSdN/AXT2COdsLUvDopoGedMcmCuw==", + "version": "16.1.0", + "resolved": "https://registry.npmjs.org/@llmist/cli/-/cli-16.1.0.tgz", + "integrity": "sha512-UO3294pwMeijqyo+3pg8m2dL9XZB4irDmKjHaSkY6Lk9YRH54E4HpqnaS1V23HWaKNQidNi9SAQXMGUqBOuemQ==", "hasInstallScript": true, "license": "MIT", "dependencies": { @@ -2406,7 +2406,7 @@ "jiti": "^2.6.1", "js-toml": "^1.0.2", "js-yaml": "^4.1.0", - "llmist": "^16.0.3", + "llmist": "^16.1.0", "marked": "^15.0.12", "marked-terminal": "^7.3.0", "zod": "^4.1.12" @@ -4724,7 +4724,9 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.2", + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" @@ -7754,9 +7756,9 @@ "license": "MIT" }, "node_modules/llmist": { - "version": "16.0.4", - "resolved": "https://registry.npmjs.org/llmist/-/llmist-16.0.4.tgz", - "integrity": "sha512-W20kOXSZaW6n8ruKG8sWU2zH9fXpqsRN8QIs68NWBZT11HtwriOr7gMIaYu8rGRYjSGPIIufX8Xmo9oSg1DYUg==", + "version": "16.1.0", + "resolved": "https://registry.npmjs.org/llmist/-/llmist-16.1.0.tgz", + "integrity": "sha512-saSSxHR8onoD4KbVAz5wuB64SIdC33LRZk1EpukjsS0JSJb6r2Be/0ry55sPY2fiARHN6S7kWdxIJLRNSFISog==", "license": "MIT", "dependencies": { "@anthropic-ai/sdk": "^0.69.0", @@ -7803,15 +7805,15 @@ } }, "node_modules/lodash": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", - "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", "license": "MIT" }, "node_modules/lodash-es": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz", - "integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.18.1.tgz", + "integrity": "sha512-J8xewKD/Gk22OZbhpOVSwcs60zhd95ESDwezOFuA3/099925PdHJ7OFHNTGtajL3AlZkykD32HykiMo+BIBI8A==", "license": "MIT" }, "node_modules/lodash.camelcase": { diff --git a/package.json b/package.json index a22fdb1c..a38faf99 100644 --- a/package.json +++ b/package.json @@ -130,5 +130,8 @@ }, "engines": { "node": ">=22.0.0" + }, + "overrides": { + 
"lodash-es": "^4.18.1"
+  }
 }
diff --git a/src/dashboard.ts b/src/dashboard.ts
index e3d781c7..be01ec14 100644
--- a/src/dashboard.ts
+++ b/src/dashboard.ts
@@ -28,6 +28,7 @@ import { resolveUserFromSession } from './api/auth/session.js';
 import { computeEffectiveOrgId } from './api/context.js';
 import { appRouter } from './api/router.js';
 import { registerBuiltInEngines } from './backends/bootstrap.js';
+import { validateCredentialMasterKey } from './db/crypto.js';
 import { captureException, flush, setTag } from './sentry.js';
 import { buildCorsMiddleware } from './utils/corsConfig.js';
@@ -106,6 +107,12 @@ app.onError((err, c) => {
 const port = Number(process.env.PORT) || 3001;
 
 async function startDashboard(): Promise<void> {
+  const keyValidation = validateCredentialMasterKey();
+  if (!keyValidation.valid) {
+    console.error(`[Dashboard] ${keyValidation.reason}`);
+    process.exit(1);
+  }
+
   await initPrompts();
   console.log(`[Dashboard] Starting on port ${port}`);
   serve({ fetch: app.fetch, port });
diff --git a/src/db/crypto.ts b/src/db/crypto.ts
index 741f67a4..e1375ea7 100644
--- a/src/db/crypto.ts
+++ b/src/db/crypto.ts
@@ -22,6 +22,29 @@ export function isEncryptionEnabled(): boolean {
   return !!process.env.CREDENTIAL_MASTER_KEY;
 }
 
+/**
+ * Validates the format of CREDENTIAL_MASTER_KEY without throwing.
+ * Returns `{ valid: true }` if the key is unset (encryption is opt-in) or correctly formatted.
+ * Returns `{ valid: false, reason: string }` if the key is set but malformed.
+ */
+export function validateCredentialMasterKey(): { valid: true } | { valid: false; reason: string } {
+  const hex = process.env.CREDENTIAL_MASTER_KEY;
+  if (!hex) return { valid: true };
+  if (hex.length !== KEY_LENGTH * 2) {
+    return {
+      valid: false,
+      reason: `CREDENTIAL_MASTER_KEY must be a ${KEY_LENGTH * 2}-char hex string (${KEY_LENGTH} bytes). 
Got ${hex.length} chars.`,
+    };
+  }
+  if (!/^[0-9a-fA-F]+$/.test(hex)) {
+    return {
+      valid: false,
+      reason: `CREDENTIAL_MASTER_KEY contains non-hex characters. Must be a ${KEY_LENGTH * 2}-char hex string.`,
+    };
+  }
+  return { valid: true };
+}
+
 /** Returns true if the value has the encrypted-value prefix. */
 export function isEncryptedValue(value: string): boolean {
   return value.startsWith(PREFIX);
diff --git a/src/router/index.ts b/src/router/index.ts
index 61182e02..7ad590d3 100644
--- a/src/router/index.ts
+++ b/src/router/index.ts
@@ -6,6 +6,7 @@ import '../integrations/bootstrap.js';
 import { initPrompts } from '../agents/prompts/index.js';
 import { registerBuiltInEngines } from '../backends/bootstrap.js';
 import { initAgentMessages } from '../config/agentMessages.js';
+import { validateCredentialMasterKey } from '../db/crypto.js';
 import { seedAgentDefinitions } from '../db/seeds/seedAgentDefinitions.js';
 import { registerBuiltInTriggers } from '../triggers/builtins.js';
 import { createTriggerRegistry } from '../triggers/registry.js';
@@ -193,6 +194,12 @@ process.on('unhandledRejection', (reason) => {
 async function startRouter(): Promise<void> {
   const port = Number(process.env.PORT) || 3000;
 
+  const keyValidation = validateCredentialMasterKey();
+  if (!keyValidation.valid) {
+    logger.error('Invalid CREDENTIAL_MASTER_KEY', { reason: keyValidation.reason });
+    process.exit(1);
+  }
+
   // Seed built-in agent definitions to DB, then initialize in-memory caches
   logger.info('Seeding agent definitions...');
   await seedAgentDefinitions();
diff --git a/tests/unit/db/crypto.test.ts b/tests/unit/db/crypto.test.ts
index 4b243985..68edac00 100644
--- a/tests/unit/db/crypto.test.ts
+++ b/tests/unit/db/crypto.test.ts
@@ -6,6 +6,7 @@ import {
   isEncryptedValue,
   isEncryptionEnabled,
   reEncryptCredential,
+  validateCredentialMasterKey,
 } from '../../../src/db/crypto.js';
 
 // Generate a valid 32-byte hex key for tests
@@ -186,4 +187,52 @@ describe('crypto', () => {
       );
     });
   });
+
+  
describe('validateCredentialMasterKey', () => { + it('returns valid when CREDENTIAL_MASTER_KEY is not set', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', ''); + const result = validateCredentialMasterKey(); + expect(result).toEqual({ valid: true }); + }); + + it('returns valid for a correct 64-char hex key', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', TEST_KEY); + const result = validateCredentialMasterKey(); + expect(result).toEqual({ valid: true }); + }); + + it('returns invalid for a key that is too short', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', 'abcd'); + const result = validateCredentialMasterKey(); + expect(result.valid).toBe(false); + if (!result.valid) { + expect(result.reason).toContain('64-char hex string'); + } + }); + + it('returns invalid for a key that is too long', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', 'a'.repeat(128)); + const result = validateCredentialMasterKey(); + expect(result.valid).toBe(false); + if (!result.valid) { + expect(result.reason).toContain('64-char hex string'); + } + }); + + it('returns invalid for non-hex characters', () => { + // 63 valid hex chars + 1 invalid 'g' + vi.stubEnv('CREDENTIAL_MASTER_KEY', `${'a'.repeat(63)}g`); + const result = validateCredentialMasterKey(); + expect(result.valid).toBe(false); + if (!result.valid) { + expect(result.reason).toContain('non-hex'); + } + }); + + it('returns valid for uppercase hex', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', TEST_KEY.toUpperCase()); + const result = validateCredentialMasterKey(); + expect(result).toEqual({ valid: true }); + }); + }); }); diff --git a/tests/unit/router/startup-validation.test.ts b/tests/unit/router/startup-validation.test.ts new file mode 100644 index 00000000..521c175c --- /dev/null +++ b/tests/unit/router/startup-validation.test.ts @@ -0,0 +1,86 @@ +import { randomBytes } from 'node:crypto'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { validateCredentialMasterKey } from 
'../../../src/db/crypto.js';
+
+/**
+ * Tests for startup validation behavior in router and dashboard.
+ *
+ * Both startRouter() and startDashboard() call validateCredentialMasterKey()
+ * and process.exit(1) if the key is malformed. Since both modules auto-execute
+ * at import time, we test the startup validation logic by verifying the
+ * validateCredentialMasterKey() + process.exit integration directly.
+ *
+ * This simulates the pattern used in both entry points:
+ *   const keyValidation = validateCredentialMasterKey();
+ *   if (!keyValidation.valid) {
+ *     <log the reason>;
+ *     process.exit(1);
+ *   }
+ */
+
+// Generate a valid 32-byte hex key for tests
+const TEST_KEY = randomBytes(32).toString('hex');
+
+describe('startup validation logic (router + dashboard pattern)', () => {
+  let processExitSpy: ReturnType<typeof vi.spyOn>;
+
+  beforeEach(() => {
+    processExitSpy = vi.spyOn(process, 'exit').mockImplementation((_code?: number) => {
+      throw new Error(`process.exit(${_code})`);
+    });
+  });
+
+  afterEach(() => {
+    vi.unstubAllEnvs();
+    processExitSpy.mockRestore();
+  });
+
+  /**
+   * Simulates the startup validation block used in startRouter() and startDashboard(). 
+ */ + function runStartupValidation(): void { + const keyValidation = validateCredentialMasterKey(); + if (!keyValidation.valid) { + process.exit(1); + } + } + + describe('when CREDENTIAL_MASTER_KEY is invalid', () => { + it('calls process.exit(1) for a too-short key', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', 'tooshort'); + + expect(() => runStartupValidation()).toThrow('process.exit(1)'); + expect(processExitSpy).toHaveBeenCalledWith(1); + }); + + it('calls process.exit(1) for a key with non-hex characters', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', 'g'.repeat(64)); + + expect(() => runStartupValidation()).toThrow('process.exit(1)'); + expect(processExitSpy).toHaveBeenCalledWith(1); + }); + + it('calls process.exit(1) for a key that is too long', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', 'a'.repeat(128)); + + expect(() => runStartupValidation()).toThrow('process.exit(1)'); + expect(processExitSpy).toHaveBeenCalledWith(1); + }); + }); + + describe('when CREDENTIAL_MASTER_KEY is valid or unset', () => { + it('does NOT call process.exit when key is unset', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', ''); + + expect(() => runStartupValidation()).not.toThrow(); + expect(processExitSpy).not.toHaveBeenCalled(); + }); + + it('does NOT call process.exit when key is a valid 64-char hex string', () => { + vi.stubEnv('CREDENTIAL_MASTER_KEY', TEST_KEY); + + expect(() => runStartupValidation()).not.toThrow(); + expect(processExitSpy).not.toHaveBeenCalled(); + }); + }); +}); From 10fadd1f9df7ab742386543ddf91ed8ce5e8249f Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 2026 10:36:20 +0200 Subject: [PATCH 03/52] fix(security): invalidate user sessions on password change (#1070) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(security): invalidate user sessions on password change * fix(deps): add overrides to resolve high-severity npm audit vulnerabilities Add npm overrides for lodash@^4.18.1, lodash-es@^4.18.1, 
and brace-expansion@^2.0.3 to address high-severity CVEs in transitive dependencies (archiver → lodash, @llmist/cli → chevrotain → lodash-es). Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 Co-authored-by: zbigniew sobiecki --- package.json | 4 ++- src/api/routers/users.ts | 7 ++++ src/db/repositories/usersRepository.ts | 15 ++++++++ tests/unit/api/routers/users.test.ts | 34 +++++++++++++++++++ .../db/repositories/usersRepository.test.ts | 31 +++++++++++++++++ 5 files changed, 90 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index a38faf99..738e6835 100644 --- a/package.json +++ b/package.json @@ -132,6 +132,8 @@ "node": ">=22.0.0" }, "overrides": { - "lodash-es": "^4.18.1" + "lodash": "^4.18.1", + "lodash-es": "^4.18.1", + "brace-expansion": "^2.0.3" } } diff --git a/src/api/routers/users.ts b/src/api/routers/users.ts index b81ab6af..cb26009c 100644 --- a/src/api/routers/users.ts +++ b/src/api/routers/users.ts @@ -4,6 +4,7 @@ import { z } from 'zod'; import { createUser, deleteUser, + deleteUserSessions, getUserById, listOrgUsers, updateUser, @@ -122,6 +123,12 @@ export const usersRouter = router({ } await updateUser(input.id, updates); + + // Invalidate all sessions for the target user when their password changes. + // This prevents stale sessions from remaining valid after a password reset. + if (updates.passwordHash !== undefined) { + await deleteUserSessions(input.id); + } }), delete: adminProcedure.input(z.object({ id: z.string() })).mutation(async ({ ctx, input }) => { diff --git a/src/db/repositories/usersRepository.ts b/src/db/repositories/usersRepository.ts index a7cddeb7..f927ae8f 100644 --- a/src/db/repositories/usersRepository.ts +++ b/src/db/repositories/usersRepository.ts @@ -84,6 +84,21 @@ export async function deleteExpiredSessions(): Promise { await db.delete(sessions).where(lt(sessions.expiresAt, new Date())); } +/** + * Delete all sessions for a given user. 
Optionally exclude a specific token
+ * (e.g. to preserve the caller's own session when they change their own password).
+ */
+export async function deleteUserSessions(userId: string, excludeToken?: string): Promise<void> {
+  const db = getDb();
+  if (excludeToken !== undefined) {
+    await db
+      .delete(sessions)
+      .where(and(eq(sessions.userId, userId), ne(sessions.token, excludeToken)));
+  } else {
+    await db.delete(sessions).where(eq(sessions.userId, userId));
+  }
+}
+
 // ============================================================================
 // CRUD for users (org-scoped)
 // ============================================================================
diff --git a/tests/unit/api/routers/users.test.ts b/tests/unit/api/routers/users.test.ts
index 11b6493a..5cd943e3 100644
--- a/tests/unit/api/routers/users.test.ts
+++ b/tests/unit/api/routers/users.test.ts
@@ -8,6 +8,7 @@ const {
   mockUpdateUser,
   mockDeleteUser,
   mockGetUserById,
+  mockDeleteUserSessions,
   mockBcryptHash,
 } = vi.hoisted(() => ({
   mockListOrgUsers: vi.fn(),
@@ -15,6 +16,7 @@
   mockUpdateUser: vi.fn(),
   mockDeleteUser: vi.fn(),
   mockGetUserById: vi.fn(),
+  mockDeleteUserSessions: vi.fn(),
   mockBcryptHash: vi.fn(),
 }));
@@ -24,6 +26,7 @@ vi.mock('../../../../src/db/repositories/usersRepository.js', () => ({
   updateUser: mockUpdateUser,
   deleteUser: mockDeleteUser,
   getUserById: mockGetUserById,
+  deleteUserSessions: mockDeleteUserSessions,
 }));
 
 vi.mock('bcrypt', () => ({
@@ -43,6 +46,7 @@ const mockMember = createMockUser({ id: 'member-1', role: 'member' });
 describe('usersRouter', () => {
   beforeEach(() => {
     mockBcryptHash.mockResolvedValue('hashed-password');
+    mockDeleteUserSessions.mockResolvedValue(undefined);
   });
 
   describe('list', () => {
@@ -406,6 +410,36 @@ describe('usersRouter', () => {
         code: 'FORBIDDEN',
       });
     });
+
+    it('invalidates all sessions when password is changed', async () => {
+      mockGetUserById.mockResolvedValue({ id: 'user-2', orgId: 'org-1', role: 'member' });
+      
mockUpdateUser.mockResolvedValue(undefined); + const caller = createCaller({ user: mockAdminUser, effectiveOrgId: mockAdminUser.orgId }); + + await caller.update({ id: 'user-2', password: 'newpassword12' }); + + expect(mockDeleteUserSessions).toHaveBeenCalledWith('user-2'); + }); + + it('does not invalidate sessions when password is not changed', async () => { + mockGetUserById.mockResolvedValue({ id: 'user-2', orgId: 'org-1', role: 'member' }); + mockUpdateUser.mockResolvedValue(undefined); + const caller = createCaller({ user: mockAdminUser, effectiveOrgId: mockAdminUser.orgId }); + + await caller.update({ id: 'user-2', name: 'New Name' }); + + expect(mockDeleteUserSessions).not.toHaveBeenCalled(); + }); + + it('does not invalidate sessions when only role/email are changed', async () => { + mockGetUserById.mockResolvedValue({ id: 'user-2', orgId: 'org-1', role: 'member' }); + mockUpdateUser.mockResolvedValue(undefined); + const caller = createCaller({ user: mockAdminUser, effectiveOrgId: mockAdminUser.orgId }); + + await caller.update({ id: 'user-2', email: 'new@example.com' }); + + expect(mockDeleteUserSessions).not.toHaveBeenCalled(); + }); }); describe('delete', () => { diff --git a/tests/unit/db/repositories/usersRepository.test.ts b/tests/unit/db/repositories/usersRepository.test.ts index 61c37f97..ccb1b546 100644 --- a/tests/unit/db/repositories/usersRepository.test.ts +++ b/tests/unit/db/repositories/usersRepository.test.ts @@ -29,6 +29,7 @@ import { deleteExpiredSessions, deleteSession, deleteUser, + deleteUserSessions, getSessionByToken, getUserByEmail, getUserById, @@ -288,4 +289,34 @@ describe('usersRepository', () => { expect(mockDb.db.delete).toHaveBeenCalledTimes(1); }); }); + + describe('deleteUserSessions', () => { + it('deletes all sessions for a user', async () => { + mockDb.chain.where.mockResolvedValueOnce(undefined); + + await deleteUserSessions('user-1'); + + expect(mockDb.db.delete).toHaveBeenCalledTimes(1); + }); + + it('deletes all 
sessions when excludeToken is not provided', async () => { + mockDb.chain.where.mockResolvedValueOnce(undefined); + + await deleteUserSessions('user-1'); + + // Without excludeToken the where clause uses a single eq condition (no and/ne) + expect(mockDb.db.delete).toHaveBeenCalledTimes(1); + expect(mockDb.chain.where).toHaveBeenCalledTimes(1); + }); + + it('excludes a specific token when provided', async () => { + mockDb.chain.where.mockResolvedValueOnce(undefined); + + await deleteUserSessions('user-1', 'keep-this-token'); + + // With excludeToken the where clause uses an and(eq, ne) condition + expect(mockDb.db.delete).toHaveBeenCalledTimes(1); + expect(mockDb.chain.where).toHaveBeenCalledTimes(1); + }); + }); }); From fc828a37fb9745e2809567dd06e8d15bf19f9797 Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 2026 11:47:59 +0200 Subject: [PATCH 04/52] fix(cors): throw hard error in production when CORS_ORIGIN is unset (#1072) Co-authored-by: Cascade Bot --- src/utils/corsConfig.ts | 14 ++++----- tests/unit/utils/corsConfig.test.ts | 46 +++++++++-------------------- 2 files changed, 20 insertions(+), 40 deletions(-) diff --git a/src/utils/corsConfig.ts b/src/utils/corsConfig.ts index a646aeb4..ca1d3a68 100644 --- a/src/utils/corsConfig.ts +++ b/src/utils/corsConfig.ts @@ -3,7 +3,7 @@ * * Selects the appropriate CORS middleware options based on environment: * - When CORS_ORIGIN is set: allow only those origins (comma-separated) - * - When unset in production: warn and allow no origins (restrictive default) + * - When unset in production: throws an error at startup (hard failure) * - When unset outside production: default to localhost:5173 (dev convenience) */ @@ -28,15 +28,14 @@ export interface CorsConfig { * Returns a ready-to-use Hono `cors()` middleware configured for the current * environment: * - If `corsOriginEnv` is set (comma-separated), only those origins are allowed. 
- * - If `corsOriginEnv` is unset in production, a warning is logged and an empty - * origin list is used (blocks all cross-origin requests). + * - If `corsOriginEnv` is unset in production, throws an `Error` to crash the + * process at startup with a clear, actionable message. * - If `corsOriginEnv` is unset outside production, `http://localhost:5173` is * used as a dev-friendly default. */ export function buildCorsMiddleware({ corsOriginEnv, isProduction, - warn = console.warn, }: CorsConfigOptions): ReturnType { const origins = corsOriginEnv ?.split(',') @@ -48,12 +47,11 @@ export function buildCorsMiddleware({ } if (isProduction) { - warn( - '[Dashboard] WARNING: CORS_ORIGIN is not set in production. ' + - 'Using restrictive default (no origins allowed). ' + + throw new Error( + '[Dashboard] CORS_ORIGIN is not set. ' + + 'This is required in production. ' + 'Set CORS_ORIGIN to your frontend URL (e.g., https://dashboard.example.com).', ); - return cors({ origin: [], credentials: true }); } // Development default diff --git a/tests/unit/utils/corsConfig.test.ts b/tests/unit/utils/corsConfig.test.ts index 8147d400..54d25dec 100644 --- a/tests/unit/utils/corsConfig.test.ts +++ b/tests/unit/utils/corsConfig.test.ts @@ -77,40 +77,22 @@ describe('buildCorsMiddleware', () => { }); describe('when CORS_ORIGIN is not set AND NODE_ENV=production', () => { - it('logs a warning at startup', () => { - const warn = vi.fn(); - buildCorsMiddleware({ - corsOriginEnv: undefined, - isProduction: true, - warn, - }); - - expect(warn).toHaveBeenCalledOnce(); - expect(warn).toHaveBeenCalledWith( - expect.stringContaining('CORS_ORIGIN is not set in production'), - ); - }); - - it('blocks all cross-origin requests (empty origin list)', async () => { - const middleware = buildCorsMiddleware({ - corsOriginEnv: undefined, - isProduction: true, - warn: vi.fn(), - }); - - const res = await fetchWithOrigin(middleware, 'https://any-origin.example.com'); - 
expect(res.headers.get('Access-Control-Allow-Origin')).toBeNull(); + it('throws an error at startup', () => { + expect(() => + buildCorsMiddleware({ + corsOriginEnv: undefined, + isProduction: true, + }), + ).toThrowError(/CORS_ORIGIN is not set/); }); - it('also blocks localhost when in production without CORS_ORIGIN', async () => { - const middleware = buildCorsMiddleware({ - corsOriginEnv: undefined, - isProduction: true, - warn: vi.fn(), - }); - - const res = await fetchWithOrigin(middleware, 'http://localhost:5173'); - expect(res.headers.get('Access-Control-Allow-Origin')).toBeNull(); + it('throws an error with an actionable message', () => { + expect(() => + buildCorsMiddleware({ + corsOriginEnv: undefined, + isProduction: true, + }), + ).toThrowError(/Set CORS_ORIGIN to your frontend URL/); }); }); From 4bb4e595d53d590597a4e5f05d42b30f31382e13 Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 2026 11:58:05 +0200 Subject: [PATCH 05/52] fix(deps): align Zod version across monorepo to ^3.25.0 (bridge) (#1073) Co-authored-by: Cascade Bot --- CLAUDE.md | 11 +++++++++++ package-lock.json | 2 +- package.json | 2 +- web/package-lock.json | 8 ++++---- web/package.json | 2 +- 5 files changed, 18 insertions(+), 7 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 97ed544f..0b3eb1db 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -92,6 +92,17 @@ npm run lint:fix # Fix npm run typecheck # Type check ``` +### Zod Version Policy + +Both the root workspace and the `web/` workspace **must use the same Zod major version**. Currently both are aligned on `zod@^3.25.0` (the bridge version that ships v3 and v4 dual exports). + +- **Root (`package.json`)**: `"zod": "^3.25.0"` — backend uses the v3 API surface +- **Web (`web/package.json`)**: `"zod": "^3.25.0"` — frontend also uses v3 API surface + +**Why this matters**: `web/tsconfig.json` includes `../src/api/**/*` and `../src/db/**/*` (backend files that import from `zod`). 
If the two workspaces resolve different Zod major versions, `z.infer<>` can silently compute different types for the same schema in backend vs. frontend compilation contexts. + +**When upgrading Zod**: Both workspaces must be bumped to the same new version together. A full migration to the v4 API would also require auditing `z.ZodType` usage (renamed class hierarchy in v4), `z.ZodIssueCode` (slightly different enum), and `.default()` behavior (eagerly evaluated in v4). + ### Git Hooks Lefthook runs pre-commit (lint, typecheck) and pre-push (unit tests, integration tests) hooks automatically. The pre-push hook auto-starts an ephemeral PostgreSQL via Docker (`npm run test:db:up`) for integration tests — Docker must be running. diff --git a/package-lock.json b/package-lock.json index d9eab24e..45f628a2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -38,7 +38,7 @@ "pg": "^8.18.0", "trello.js": "^1.2.8", "zangief": "^1.0.5", - "zod": "^3.24.1" + "zod": "^3.25.0" }, "bin": { "cascade": "bin/cascade.js", diff --git a/package.json b/package.json index 738e6835..3ebfcfa1 100644 --- a/package.json +++ b/package.json @@ -83,7 +83,7 @@ "pg": "^8.18.0", "trello.js": "^1.2.8", "zangief": "^1.0.5", - "zod": "^3.24.1" + "zod": "^3.25.0" }, "devDependencies": { "@biomejs/biome": "^1.9.4", diff --git a/web/package-lock.json b/web/package-lock.json index 336b1610..5f89e557 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -26,7 +26,7 @@ "sonner": "^2.0.7", "tailwind-merge": "^3.3.0", "tw-animate-css": "^1.2.9", - "zod": "^4.3.6" + "zod": "^3.25.0" }, "devDependencies": { "@tailwindcss/vite": "^4.1.7", @@ -6104,9 +6104,9 @@ "license": "ISC" }, "node_modules/zod": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", - "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "version": "3.25.76", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", + 
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/colinhacks" diff --git a/web/package.json b/web/package.json index fe4e1149..b31429fa 100644 --- a/web/package.json +++ b/web/package.json @@ -28,7 +28,7 @@ "sonner": "^2.0.7", "tailwind-merge": "^3.3.0", "tw-animate-css": "^1.2.9", - "zod": "^4.3.6" + "zod": "^3.25.0" }, "devDependencies": { "@tailwindcss/vite": "^4.1.7", From 211f0128b7f2fc9c6aaed0ee2fdfefd5d602c096 Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 2026 12:09:57 +0200 Subject: [PATCH 06/52] feat(prompts): add documentation maintenance instructions to agent prompts (#1074) Co-authored-by: Cascade Bot --- .../prompts/templates/implementation.eta | 3 + .../partials/documentation-maintenance.eta | 42 ++++++ src/agents/prompts/templates/planning.eta | 1 + .../prompts/templates/resolve-conflicts.eta | 2 + .../prompts/templates/respond-to-ci.eta | 2 + .../templates/respond-to-pr-comment.eta | 3 + .../prompts/templates/respond-to-review.eta | 3 + src/agents/prompts/templates/review.eta | 11 ++ tests/unit/agents/prompts.test.ts | 127 ++++++++++++++++++ 9 files changed, 194 insertions(+) create mode 100644 src/agents/prompts/templates/partials/documentation-maintenance.eta diff --git a/src/agents/prompts/templates/implementation.eta b/src/agents/prompts/templates/implementation.eta index 8dd220c8..0e581f75 100644 --- a/src/agents/prompts/templates/implementation.eta +++ b/src/agents/prompts/templates/implementation.eta @@ -106,6 +106,8 @@ Use `PostComment` to post a summary on the <%= it.workItemNoun || 'card' %> once <%~ include("partials/test-protocol") %> +<%~ include("partials/documentation-maintenance") %> + ### Completion Protocol (CRITICAL) NEVER mark acceptance criteria or checklist items complete until ALL of: @@ -113,5 +115,6 @@ NEVER mark acceptance criteria or checklist items complete until ALL of: 2. 
Tests pass (zero failures) 3. Lint passes (zero errors) 4. PR is successfully created (and you have the PR number) +5. Documentation updated (if changes affect documented behavior) <%~ include("partials/rules-efficiency") %> diff --git a/src/agents/prompts/templates/partials/documentation-maintenance.eta b/src/agents/prompts/templates/partials/documentation-maintenance.eta new file mode 100644 index 00000000..a74b0eb1 --- /dev/null +++ b/src/agents/prompts/templates/partials/documentation-maintenance.eta @@ -0,0 +1,42 @@ +### Documentation Maintenance + +When your code changes affect documented behavior, update the relevant docs as part of the same PR. + +#### When to Update Docs + +Check for doc updates if your change: +- Adds, removes, or renames a public function, class, env var, or config option +- Changes how a feature behaves from the user or developer perspective +- Introduces a new architectural pattern or module convention +- Modifies setup, deployment, or configuration steps + +Skip doc updates for: internal refactors with no external behavior change, test-only changes, or +bug fixes that restore behavior already described in docs. 
+ +#### What to Check + +- **CLAUDE.md / AGENTS.md** — Developer conventions, setup steps, commands +- **README.md** — User-facing overview, installation, usage +- **docs/** — Architecture guides, feature documentation +- **JSDoc / inline comments** — Function signatures, complex logic explanations + +#### How to Find Relevant Docs + +Use RipGrep to locate docs that reference the feature or symbol you changed: + +```bash +# Find docs mentioning a function/feature name +RipGrep(pattern="functionName|FeatureName", glob="**/*.md") + +# Find JSDoc for a specific function +RipGrep(pattern="@param|@returns", glob="src/path/to/file.ts") +``` + +#### Documentation Update Checklist + +Before marking your work complete, verify: +- [ ] Public API changes are reflected in JSDoc (params, return types, thrown errors) +- [ ] New env vars or config options are documented in CLAUDE.md or README +- [ ] New architectural patterns are noted in the relevant docs/ file +- [ ] Removed/renamed features no longer appear in docs as current behavior + diff --git a/src/agents/prompts/templates/planning.eta b/src/agents/prompts/templates/planning.eta index c7206eb1..1e5db1ce 100644 --- a/src/agents/prompts/templates/planning.eta +++ b/src/agents/prompts/templates/planning.eta @@ -36,6 +36,7 @@ You are running in a cloned copy of the project repository. Before creating your 3. **Trace integrations** - Understand how components connect (APIs, event flows, shared state, pub/sub) 4. **Map dependencies** - What libraries, utilities, and shared modules does related code use? 5. **Note conventions** - Naming patterns, file organization, error handling, logging practices +6. 
**Check documentation** - Scan CLAUDE.md, README.md, and docs/ to identify which docs cover the area being changed; include a doc update step in your plan if the change affects documented behavior (public APIs, env vars, architecture, setup steps) **When planning your solution:** - Reuse existing utilities and abstractions rather than creating new ones diff --git a/src/agents/prompts/templates/resolve-conflicts.eta b/src/agents/prompts/templates/resolve-conflicts.eta index 55ed7c7a..84b2600e 100644 --- a/src/agents/prompts/templates/resolve-conflicts.eta +++ b/src/agents/prompts/templates/resolve-conflicts.eta @@ -165,6 +165,8 @@ Use `UpdatePRComment` with the comment ID from session state. <%~ include("partials/test-protocol") %> +<%~ include("partials/documentation-maintenance") %> + <%~ include("partials/verification-protocol") %> <%~ include("partials/rules-efficiency") %> diff --git a/src/agents/prompts/templates/respond-to-ci.eta b/src/agents/prompts/templates/respond-to-ci.eta index f4884dbd..e8f32894 100644 --- a/src/agents/prompts/templates/respond-to-ci.eta +++ b/src/agents/prompts/templates/respond-to-ci.eta @@ -131,6 +131,8 @@ Use `UpdatePRComment` with the comment ID from session state. <%~ include("partials/test-protocol") %> +<%~ include("partials/documentation-maintenance") %> + <%~ include("partials/verification-protocol") %> <%~ include("partials/rules-efficiency") %> diff --git a/src/agents/prompts/templates/respond-to-pr-comment.eta b/src/agents/prompts/templates/respond-to-pr-comment.eta index 5c9633f5..0912ab92 100644 --- a/src/agents/prompts/templates/respond-to-pr-comment.eta +++ b/src/agents/prompts/templates/respond-to-pr-comment.eta @@ -91,6 +91,7 @@ Use `UpdatePRComment` with the comment ID from session state. 
- Don't make unrelated changes - If the request conflicts with codebase conventions, follow conventions and explain why - Follow existing code patterns and conventions +- If changes affect documented behavior, update relevant docs (CLAUDE.md, README, JSDoc) ### Code Quality - ALWAYS run tests and lint before committing @@ -99,6 +100,8 @@ Use `UpdatePRComment` with the comment ID from session state. <%~ include("partials/test-protocol") %> +<%~ include("partials/documentation-maintenance") %> + <%~ include("partials/verification-protocol") %> <%~ include("partials/rules-efficiency") %> \ No newline at end of file diff --git a/src/agents/prompts/templates/respond-to-review.eta b/src/agents/prompts/templates/respond-to-review.eta index 20ae51d9..3753b2ba 100644 --- a/src/agents/prompts/templates/respond-to-review.eta +++ b/src/agents/prompts/templates/respond-to-review.eta @@ -86,6 +86,7 @@ Fixed! Updated the function to handle the edge case by adding a null check at li - Reply to EACH comment after fixing it - If you can't address a comment (unclear, out of scope, disagree), reply explaining why - Follow existing code patterns and conventions +- If changes affect documented behavior, update relevant docs (CLAUDE.md, README, JSDoc) ### Code Quality - ALWAYS run tests and lint before committing @@ -94,6 +95,8 @@ Fixed! 
Updated the function to handle the edge case by adding a null check at li <%~ include("partials/test-protocol") %> +<%~ include("partials/documentation-maintenance") %> + <%~ include("partials/verification-protocol") %> <%~ include("partials/rules-efficiency") %> diff --git a/src/agents/prompts/templates/review.eta b/src/agents/prompts/templates/review.eta index 05b901f0..8795dd32 100644 --- a/src/agents/prompts/templates/review.eta +++ b/src/agents/prompts/templates/review.eta @@ -154,6 +154,17 @@ Style and preferences - mention only if egregious: - **Build-time config**: For features requiring build-time injection, verify the injection point exists - **When uncertain**: Ask in your review rather than assuming deployment is configured correctly +### Documentation +- **Currency**: Do CLAUDE.md, README.md, and docs/ still accurately describe features changed by this PR? +- **New features**: Are user-facing or developer-facing features introduced without any documentation? +- **Stale references**: Does the PR remove or rename something that's still referenced in docs? +- **JSDoc**: Are new public functions/classes missing parameter or return type documentation? 
+ +Severity guide: +- Missing docs for a user-facing feature or new env var = **SHOULD_FIX** +- Missing JSDoc on a new public API = **NITPICK** +- Actively misleading docs (describe behavior that no longer exists) = **SHOULD_FIX** + ## Strategic Questions Answer these during Phase 1 — they catch design problems that line-by-line review misses: diff --git a/tests/unit/agents/prompts.test.ts b/tests/unit/agents/prompts.test.ts index 7873ba80..10a2741a 100644 --- a/tests/unit/agents/prompts.test.ts +++ b/tests/unit/agents/prompts.test.ts @@ -15,6 +15,7 @@ vi.mock('../../../src/agents/definitions/index.js', () => ({ 'respond-to-planning-comment', 'debug', 'backlog-manager', + 'resolve-conflicts', ]), })); @@ -398,6 +399,7 @@ describe('readTemplateFileSync', () => { 'respond-to-planning-comment', 'debug', 'backlog-manager', + 'resolve-conflicts', ]; for (const agentType of builtinTypes) { const content = readTemplateFileSync(agentType); @@ -655,6 +657,7 @@ describe('duplicate content detection', () => { 'respond-to-planning-comment', 'debug', 'backlog-manager', + 'resolve-conflicts', ]; for (const agentType of allAgentTypes) { @@ -827,3 +830,127 @@ describe('squintEnabled template gating', () => { expect(rendered).not.toContain('squint-features'); }); }); + +describe('documentation-maintenance partial', () => { + it('partial exists in getAvailablePartialNames()', () => { + const names = getAvailablePartialNames(); + expect(names).toContain('documentation-maintenance'); + }); + + it('partial contains key doc-update phrases', () => { + const content = getRawPartial('documentation-maintenance'); + expect(content).toContain('CLAUDE.md'); + expect(content).toContain('README'); + expect(content).toContain('JSDoc'); + expect(content).toContain('docs/'); + }); + + it('partial describes when to update docs (conditional guidance)', () => { + const content = getRawPartial('documentation-maintenance'); + expect(content).toContain('When to'); + }); + + it('partial provides a 
documentation update checklist', () => { + const content = getRawPartial('documentation-maintenance'); + expect(content).toContain('Documentation Update Checklist'); + }); +}); + +describe('documentation maintenance in code-modifying agent prompts', () => { + it('implementation prompt contains documentation maintenance section', () => { + const prompt = getSystemPrompt('implementation'); + expect(prompt).toContain('Documentation Maintenance'); + expect(prompt).toContain('CLAUDE.md'); + expect(prompt).toContain('JSDoc'); + }); + + it('implementation prompt completion protocol includes documentation step', () => { + const prompt = getSystemPrompt('implementation'); + expect(prompt).toContain('Documentation updated'); + }); + + it('respond-to-review prompt contains documentation maintenance section', () => { + const prompt = getSystemPrompt('respond-to-review'); + expect(prompt).toContain('Documentation Maintenance'); + expect(prompt).toContain('CLAUDE.md'); + }); + + it('respond-to-review prompt scope section mentions documentation updates', () => { + const prompt = getSystemPrompt('respond-to-review'); + expect(prompt).toContain('documented behavior'); + }); + + it('respond-to-ci prompt contains documentation maintenance section', () => { + const prompt = getSystemPrompt('respond-to-ci'); + expect(prompt).toContain('Documentation Maintenance'); + expect(prompt).toContain('CLAUDE.md'); + }); + + it('respond-to-pr-comment prompt contains documentation maintenance section', () => { + const prompt = getSystemPrompt('respond-to-pr-comment'); + expect(prompt).toContain('Documentation Maintenance'); + expect(prompt).toContain('CLAUDE.md'); + }); + + it('respond-to-pr-comment prompt scope section mentions documentation updates', () => { + const prompt = getSystemPrompt('respond-to-pr-comment'); + expect(prompt).toContain('documented behavior'); + }); + + it('resolve-conflicts prompt contains documentation maintenance section', () => { + const prompt = 
getSystemPrompt('resolve-conflicts'); + expect(prompt).toContain('Documentation Maintenance'); + expect(prompt).toContain('CLAUDE.md'); + }); +}); + +describe('documentation review checks in review agent', () => { + it('review prompt contains Documentation subsection under What to Verify', () => { + const prompt = getSystemPrompt('review'); + expect(prompt).toContain('### Documentation'); + }); + + it('review prompt covers documentation currency', () => { + const prompt = getSystemPrompt('review'); + expect(prompt).toContain('Currency'); + }); + + it('review prompt covers undocumented new features', () => { + const prompt = getSystemPrompt('review'); + expect(prompt).toContain('New features'); + }); + + it('review prompt covers stale references in docs', () => { + const prompt = getSystemPrompt('review'); + expect(prompt).toContain('Stale references'); + }); + + it('review prompt includes SHOULD_FIX severity for missing user-facing docs', () => { + const prompt = getSystemPrompt('review'); + expect(prompt).toContain('SHOULD_FIX'); + }); + + it('review prompt does NOT include documentation-maintenance partial (reports gaps, does not fix)', () => { + const prompt = getSystemPrompt('review'); + // The partial's checklist heading should not be present in review + expect(prompt).not.toContain('Documentation Update Checklist'); + }); +}); + +describe('documentation planning in planning agent', () => { + it('planning prompt contains documentation check as step 6 in pattern analysis', () => { + const prompt = getSystemPrompt('planning'); + expect(prompt).toContain('Check documentation'); + }); + + it('planning prompt includes guidance to add doc update steps to plans', () => { + const prompt = getSystemPrompt('planning'); + expect(prompt).toContain('doc update step'); + }); + + it('planning prompt does NOT include documentation-maintenance partial', () => { + const prompt = getSystemPrompt('planning'); + // The partial's checklist heading should not be in planning prompt + 
expect(prompt).not.toContain('Documentation Update Checklist'); + }); +}); From 2b7559f04a75de9360f90bb6479dd603a0140bd4 Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 2026 12:26:29 +0200 Subject: [PATCH 07/52] chore(deps): upgrade @biomejs/biome from 1.9.4 to 2.4.10 (#1075) Co-authored-by: Cascade Bot --- bin/cascade.js | 1 + biome.json | 15 ++- package-lock.json | 69 +++++----- package.json | 2 +- src/agents/capabilities/resolver.ts | 12 +- src/agents/definitions/contextSteps.ts | 7 +- src/agents/definitions/index.ts | 61 +++++---- src/agents/definitions/loader.ts | 2 +- src/agents/definitions/strategies.ts | 2 +- src/agents/prompts/index.ts | 2 +- src/agents/shared/builderFactory.ts | 4 +- src/agents/shared/capabilities.ts | 18 +-- src/agents/shared/executionPipeline.ts | 1 + src/agents/shared/modelResolution.ts | 2 +- src/agents/shared/prFormatting.ts | 4 +- src/agents/shared/syntheticCalls.ts | 2 +- src/agents/utils/agentLoop.ts | 2 +- src/agents/utils/checklistSync.ts | 2 +- src/agents/utils/hooks.ts | 2 +- src/agents/utils/index.ts | 13 +- src/api/routers/runs.ts | 2 +- src/api/trpc.ts | 2 +- src/backends/adapter.ts | 2 +- src/backends/claude-code/index.ts | 10 +- src/backends/claude-code/messageProcessing.ts | 2 +- src/backends/codex/index.ts | 10 +- src/backends/index.ts | 35 +++-- src/backends/llmist/index.ts | 2 +- src/backends/opencode/index.ts | 8 +- src/backends/opencode/server.ts | 2 +- src/backends/opencode/stream.ts | 5 +- src/backends/progressMonitor.ts | 2 +- src/backends/shared/continuationLoop.ts | 2 +- src/backends/shared/envBuilder.ts | 2 +- src/backends/sidecarManager.ts | 2 +- src/cli/dashboard/_shared/base.ts | 2 +- src/cli/pm/add-checklist.ts | 2 +- src/cli/session/finish.ts | 3 +- src/config/retryConfig.ts | 2 +- .../agentDefinitionsRepository.ts | 2 +- src/db/repositories/configRepository.ts | 4 +- src/db/repositories/joinHelpers.ts | 2 +- src/db/repositories/prWorkItemsRepository.ts | 4 +- 
src/db/repositories/runStatsRepository.ts | 4 +- src/db/repositories/runsRepository.ts | 17 ++- src/db/repositories/settingsRepository.ts | 4 +- src/db/repositories/webhookLogsRepository.ts | 2 +- src/db/schema/index.ts | 8 +- src/db/schema/projectCredentials.ts | 3 +- src/gadgets/AstGrep.ts | 4 +- src/gadgets/Finish.ts | 3 +- src/gadgets/ListDirectory.ts | 2 +- src/gadgets/github/index.ts | 16 +-- src/gadgets/pm/AddChecklist.ts | 2 +- src/gadgets/pm/core/readWorkItem.ts | 2 +- src/gadgets/pm/index.ts | 10 +- src/gadgets/sentry/index.ts | 2 +- src/gadgets/shared/diagnosticState.ts | 2 +- src/gadgets/shared/index.ts | 14 +- src/gadgets/tmux.ts | 6 +- src/gadgets/tmux/TmuxGadget.ts | 2 +- src/gadgets/tmux/index.ts | 6 +- src/gadgets/todo/TodoUpdateStatus.ts | 2 +- src/gadgets/todo/TodoUpsert.ts | 2 +- src/gadgets/todo/index.ts | 5 +- src/gadgets/todo/storage.ts | 2 +- src/integrations/index.ts | 4 +- src/pm/index.ts | 45 +++---- src/router/ackMessageGenerator.ts | 2 +- src/router/adapters/github.ts | 4 +- src/router/adapters/jira.ts | 2 +- src/router/adapters/sentry.ts | 2 +- src/router/adapters/trello.ts | 2 +- src/router/container-manager.ts | 10 +- src/router/platformClients/index.ts | 6 +- src/router/platformClients/types.ts | 1 + src/router/reactions.ts | 4 +- src/sentry/alerting-integration.ts | 2 +- src/triggers/github/index.ts | 4 +- src/triggers/github/pr-ready-to-merge.ts | 2 +- src/triggers/index.ts | 12 +- src/triggers/jira/index.ts | 4 +- src/triggers/shared/agent-execution.ts | 4 +- src/triggers/shared/agent-pm-poster.ts | 2 +- src/triggers/shared/trigger-check.ts | 2 +- src/triggers/trello/index.ts | 6 +- src/triggers/trello/status-changed.ts | 2 +- src/triggers/types.ts | 3 +- src/utils/fileLogger.ts | 2 +- src/utils/index.ts | 14 +- src/utils/llmLogging.ts | 2 +- src/utils/logging.ts | 2 +- src/webhook/webhookHandlers.ts | 2 +- src/worker-entry.ts | 2 +- tests/docker/claude-code-auth/verify-auth.ts | 2 +- .../db/agentDefinitionsRepository.test.ts | 2 
+- .../db/agentTriggerConfigsRepository.test.ts | 2 +- .../integration/db/projectsRepository.test.ts | 2 +- tests/integration/github-personas.test.ts | 2 +- tests/integration/helpers/seed.ts | 24 +--- .../unit/agents/capabilities/resolver.test.ts | 2 +- .../agents/definitions/contextSteps.test.ts | 2 +- .../definitions/pipelineSnapshot.test.ts | 2 +- tests/unit/agents/hooks.test.ts | 2 +- tests/unit/agents/registry.test.ts | 2 +- tests/unit/agents/shared/gadgets.test.ts | 2 +- tests/unit/agents/shared/repository.test.ts | 3 - tests/unit/agents/shared/runTracking.test.ts | 4 +- .../unit/agents/shared/syntheticCalls.test.ts | 2 +- tests/unit/agents/utils/agentLoop.test.ts | 2 +- tests/unit/agents/utils/logging.test.ts | 2 +- tests/unit/agents/utils/setup.test.ts | 2 +- tests/unit/agents/utils/tracking.test.ts | 2 +- tests/unit/api/access-control.test.ts | 3 +- tests/unit/api/auth/logout.test.ts | 2 +- tests/unit/api/auth/rateLimiter.test.ts | 4 +- tests/unit/api/auth/session.test.ts | 2 +- .../api/routers/_shared/triggerTypes.test.ts | 2 +- tests/unit/api/routers/agentConfigs.test.ts | 2 +- .../api/routers/agentTriggerConfigs.test.ts | 2 +- tests/unit/api/routers/organization.test.ts | 2 +- tests/unit/api/routers/projects.test.ts | 2 +- tests/unit/api/routers/prompts.test.ts | 2 +- tests/unit/api/routers/runs.test.ts | 2 +- tests/unit/api/routers/users.test.ts | 2 +- tests/unit/api/routers/webhookLogs.test.ts | 2 +- tests/unit/api/routers/webhooks.test.ts | 4 +- tests/unit/api/trpc.test.ts | 2 +- tests/unit/backends/accumulator.test.ts | 2 +- tests/unit/backends/adapter.test.ts | 6 +- tests/unit/backends/agent-profiles.test.ts | 9 +- tests/unit/backends/claude-code-hooks.test.ts | 2 +- .../backends/claude-code-imagePrompt.test.ts | 1 - tests/unit/backends/claude-code.test.ts | 4 +- tests/unit/backends/codex.test.ts | 2 +- tests/unit/backends/githubPoster.test.ts | 2 +- tests/unit/backends/nativeToolRuntime.test.ts | 6 +- tests/unit/backends/opencode-stream.test.ts | 
8 +- tests/unit/backends/opencode.test.ts | 2 +- tests/unit/backends/pmPoster.test.ts | 2 +- tests/unit/backends/postProcess.test.ts | 2 +- tests/unit/backends/progressModel.test.ts | 4 +- tests/unit/backends/progressState.test.ts | 2 +- tests/unit/backends/registry.test.ts | 2 +- tests/unit/backends/secretBuilder.test.ts | 2 +- tests/unit/backends/shared-envBuilder.test.ts | 2 +- tests/unit/backends/shared-envFilter.test.ts | 2 +- .../backends/shared-llmCallLogger.test.ts | 2 +- .../backends/shared-nativeToolPrompts.test.ts | 3 +- tests/unit/backends/sidecarManager.test.ts | 2 +- tests/unit/cli/cli-command-factory.test.ts | 2 +- tests/unit/cli/dashboard/client.test.ts | 2 +- .../cli/dashboard/prompts/prompts.test.ts | 2 +- tests/unit/cli/dashboard/spinner.test.ts | 2 +- tests/unit/cli/file-input-flags.test.ts | 11 +- tests/unit/cli/pm/pm-commands.test.ts | 15 +-- tests/unit/cli/scm/scm-commands.test.ts | 17 ++- tests/unit/config/agentMessages.test.ts | 4 +- tests/unit/config/integrationRoles.test.ts | 4 +- tests/unit/config/projects.test.ts | 2 +- tests/unit/config/provider.test.ts | 2 +- tests/unit/config/rateLimits.test.ts | 4 +- tests/unit/config/reviewConfig.test.ts | 2 +- tests/unit/config/statusUpdateConfig.test.ts | 2 +- tests/unit/db/client.test.ts | 2 +- tests/unit/db/crypto.test.ts | 2 +- .../unit/db/repositories/configMapper.test.ts | 4 +- .../db/repositories/configRepository.test.ts | 2 +- .../repositories/runStatsRepository.test.ts | 3 +- .../runsRepository-concurrency.test.ts | 3 +- .../runsRepository.dashboard.test.ts | 2 +- .../webhookLogsRepository.test.ts | 4 +- tests/unit/gadgets/astGrep.test.ts | 1 - tests/unit/gadgets/github.test.ts | 4 +- .../unit/gadgets/github/core/createPR.test.ts | 24 ++-- tests/unit/gadgets/github/core/misc.test.ts | 2 +- .../unit/gadgets/pm/core/addChecklist.test.ts | 2 +- .../gadgets/pm/core/createWorkItem.test.ts | 2 +- .../pm/core/deleteChecklistItem.test.ts | 2 +- .../gadgets/pm/core/listWorkItems.test.ts | 2 +- 
.../unit/gadgets/pm/core/moveWorkItem.test.ts | 2 +- .../unit/gadgets/pm/core/readWorkItem.test.ts | 2 +- .../pm/core/updateChecklistItem.test.ts | 2 +- .../gadgets/pm/core/updateWorkItem.test.ts | 2 +- .../unit/gadgets/session/core/finish.test.ts | 2 +- tests/unit/gadgets/sessionState.test.ts | 2 +- tests/unit/gadgets/shared/factories.test.ts | 2 +- .../gadgets/shared/pathValidation.test.ts | 2 +- tests/unit/gadgets/tmux.test.ts | 12 +- .../gadgets/tmux/TmuxControlClient.test.ts | 2 +- tests/unit/gadgets/tmux/TmuxGadget.test.ts | 2 +- tests/unit/gadgets/todo-storage.test.ts | 2 +- tests/unit/gadgets/todo.test.ts | 4 +- tests/unit/gadgets/todo/todoDelete.test.ts | 2 +- tests/unit/github/client.test.ts | 7 +- tests/unit/github/integration.test.ts | 2 +- tests/unit/github/personas.test.ts | 2 +- tests/unit/instrument.test.ts | 7 - tests/unit/pm/context.test.ts | 2 +- tests/unit/pm/media.test.ts | 4 +- tests/unit/queue/cancel.test.ts | 2 - tests/unit/queue/client.test.ts | 1 - tests/unit/router/ackMessageGenerator.test.ts | 2 +- tests/unit/router/adapters/github.test.ts | 2 +- tests/unit/router/adapters/jira.test.ts | 2 +- tests/unit/router/adapters/sentry.test.ts | 2 +- tests/unit/router/adapters/trello.test.ts | 2 +- tests/unit/router/config.test.ts | 6 +- tests/unit/router/platformClients.test.ts | 2 +- tests/unit/router/snapshot-manager.test.ts | 2 +- tests/unit/router/trello.test.ts | 2 +- tests/unit/router/webhook-processor.test.ts | 4 +- tests/unit/router/work-item-lock.test.ts | 2 - tests/unit/sentry.test.ts | 2 - tests/unit/trello/client.test.ts | 5 +- tests/unit/triggers/agent-execution.test.ts | 2 +- .../triggers/agent-result-handler.test.ts | 2 +- tests/unit/triggers/budget.test.ts | 2 +- tests/unit/triggers/builtins.test.ts | 2 +- .../unit/triggers/check-suite-failure.test.ts | 4 +- .../unit/triggers/check-suite-success.test.ts | 3 +- tests/unit/triggers/debug-runner.test.ts | 1 + tests/unit/triggers/debug-trigger.test.ts | 2 +- 
.../unit/triggers/github-integration.test.ts | 2 +- .../github-pr-comment-mention.test.ts | 6 +- tests/unit/triggers/pr-merged.test.ts | 7 +- tests/unit/triggers/pr-opened.test.ts | 1 + tests/unit/triggers/pr-ready-to-merge.test.ts | 7 +- .../unit/triggers/pr-review-submitted.test.ts | 1 + tests/unit/triggers/review-requested.test.ts | 1 + .../triggers/shared/agent-pm-poster.test.ts | 2 +- tests/unit/utils/llmLogging.test.ts | 2 +- tests/unit/utils/logging.test.ts | 2 +- tests/unit/utils/safeOperation.test.ts | 2 +- tests/unit/utils/webhookLogger.test.ts | 2 +- tests/unit/web/pm-wizard-state.test.ts | 11 +- tests/unit/web/project-navigation.test.ts | 2 +- tests/unit/web/triggerAgentMapping.test.ts | 2 +- tests/unit/webhook/webhookHandlers.test.ts | 2 +- tests/unit/webhook/webhookLogging.test.ts | 2 +- tests/unit/worker-entry.test.ts | 9 +- tools/rotate-credential-key.ts | 2 +- tools/seed-config-from-json.ts | 2 +- web/src/app.tsx | 2 +- web/src/components/debug/debug-analysis.tsx | 4 +- .../global/organization-form-dialog.tsx | 8 +- web/src/components/layout/breadcrumbs.tsx | 6 +- web/src/components/layout/header.tsx | 4 +- web/src/components/layout/mobile-sidebar.tsx | 2 +- web/src/components/layout/sidebar.tsx | 26 ++-- .../components/llm-calls/llm-call-detail.tsx | 4 +- .../components/llm-calls/llm-call-list.tsx | 7 +- web/src/components/logs/log-viewer.tsx | 4 +- .../projects/agent-prompt-overrides.tsx | 6 +- .../components/projects/integration-form.tsx | 16 +-- .../projects/pm-wizard-common-steps.tsx | 3 +- .../components/projects/pm-wizard-hooks.ts | 5 +- .../projects/pm-wizard-jira-steps.tsx | 5 +- .../projects/pm-wizard-trello-steps.tsx | 7 +- web/src/components/projects/pm-wizard.tsx | 4 +- .../projects/project-agent-configs.tsx | 8 +- .../projects/project-form-dialog.tsx | 4 +- .../projects/project-general-form.tsx | 10 +- .../projects/project-harness-form.tsx | 6 +- .../project-lifecycle-automations.tsx | 9 +- .../projects/project-secret-field.tsx | 7 +- 
.../projects/project-work-table.tsx | 5 +- .../components/projects/projects-table.tsx | 8 +- .../components/projects/use-project-update.ts | 2 +- web/src/components/projects/wizard-shared.tsx | 3 +- web/src/components/runs/cancel-run-button.tsx | 124 +++++++++--------- .../runs/project-work-duration-chart.tsx | 6 +- web/src/components/runs/retry-run-button.tsx | 4 +- web/src/components/runs/run-filters.tsx | 2 +- web/src/components/runs/run-summary-card.tsx | 4 +- web/src/components/runs/runs-table.tsx | 2 +- .../components/runs/trigger-run-dialog.tsx | 4 +- .../components/runs/work-item-cost-chart.tsx | 2 +- .../components/runs/work-item-runs-table.tsx | 2 +- .../settings/agent-config-form-dialog.tsx | 6 +- .../settings/agent-configs-table.tsx | 6 +- .../settings/agent-definition-editor.tsx | 2 +- .../settings/agent-definition-prompts.tsx | 5 +- .../settings/agent-definition-sections.tsx | 2 +- .../settings/agent-definition-shared.tsx | 5 +- .../settings/agent-definition-table.tsx | 6 +- web/src/components/settings/model-field.tsx | 2 +- .../settings/openrouter-model-combobox.tsx | 4 +- web/src/components/settings/org-form.tsx | 4 +- web/src/components/settings/prompt-editor.tsx | 6 +- .../settings/useDefinitionEditor.ts | 3 +- .../components/settings/user-form-dialog.tsx | 4 +- web/src/components/settings/users-table.tsx | 6 +- web/src/components/shared/trigger-toggles.tsx | 2 +- web/src/components/ui/badge.tsx | 2 +- web/src/components/ui/button.tsx | 2 +- web/src/components/ui/card.tsx | 2 +- web/src/components/ui/combobox.tsx | 4 +- web/src/components/ui/form.tsx | 8 +- web/src/components/ui/table.tsx | 2 +- web/src/components/ui/tabs.tsx | 4 +- .../webhooklogs/webhooklog-detail-dialog.tsx | 2 +- web/src/index.css | 3 +- web/src/lib/org-context.tsx | 5 +- web/src/lib/trigger-agent-mapping.ts | 6 +- web/src/routes/__root.tsx | 8 +- web/src/routes/global/definitions.tsx | 10 +- web/src/routes/global/organizations.tsx | 6 +- web/src/routes/global/runs.tsx | 6 +- 
web/src/routes/global/webhook-logs.tsx | 6 +- web/src/routes/index.tsx | 10 +- web/src/routes/login.tsx | 2 +- .../projects/$projectId.agent-configs.tsx | 2 +- .../routes/projects/$projectId.general.tsx | 4 +- .../routes/projects/$projectId.harness.tsx | 4 +- .../projects/$projectId.integrations.tsx | 2 +- .../routes/projects/$projectId.lifecycle.tsx | 2 +- web/src/routes/projects/$projectId.stats.tsx | 8 +- web/src/routes/projects/$projectId.tsx | 2 +- web/src/routes/projects/$projectId.work.tsx | 4 +- web/src/routes/projects/index.tsx | 6 +- web/src/routes/prs/$projectId.$prNumber.tsx | 6 +- web/src/routes/runs/$runId.tsx | 8 +- web/src/routes/settings/general.tsx | 2 +- web/src/routes/settings/users.tsx | 6 +- .../work-items/$projectId.$workItemId.tsx | 6 +- 326 files changed, 784 insertions(+), 841 deletions(-) diff --git a/bin/cascade.js b/bin/cascade.js index c194df91..886f324d 100755 --- a/bin/cascade.js +++ b/bin/cascade.js @@ -1,3 +1,4 @@ #!/usr/bin/env node import { execute } from '@oclif/core'; + await execute({ dir: import.meta.url }); diff --git a/biome.json b/biome.json index 6a7fcf8f..20972b0d 100644 --- a/biome.json +++ b/biome.json @@ -1,13 +1,11 @@ { - "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", + "$schema": "https://biomejs.dev/schemas/2.4.10/schema.json", "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true }, - "organizeImports": { - "enabled": true - }, + "assist": { "actions": { "source": { "organizeImports": "on" } } }, "linter": { "enabled": true, "rules": { @@ -34,6 +32,13 @@ } }, "files": { - "ignore": ["node_modules", "dist", "coverage", "*.json"] + "includes": [ + "**", + "!**/node_modules", + "!**/dist", + "!**/coverage", + "!**/*.json", + "!web/src/index.css" + ] } } diff --git a/package-lock.json b/package-lock.json index 45f628a2..77a5c967 100644 --- a/package-lock.json +++ b/package-lock.json @@ -45,7 +45,7 @@ "cascade-tools": "bin/cascade-tools.js" }, "devDependencies": { - "@biomejs/biome": 
"^1.9.4", + "@biomejs/biome": "^2.4.10", "@commitlint/cli": "^20.1.0", "@commitlint/config-conventional": "^20.0.0", "@testing-library/dom": "^10.4.1", @@ -277,9 +277,10 @@ } }, "node_modules/@biomejs/biome": { - "version": "1.9.4", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.4.10.tgz", + "integrity": "sha512-xxA3AphFQ1geij4JTHXv4EeSTda1IFn22ye9LdyVPoJU19fNVl0uzfEuhsfQ4Yue/0FaLs2/ccVi4UDiE7R30w==", "dev": true, - "hasInstallScript": true, "license": "MIT OR Apache-2.0", "bin": { "biome": "bin/biome" @@ -292,18 +293,20 @@ "url": "https://opencollective.com/biome" }, "optionalDependencies": { - "@biomejs/cli-darwin-arm64": "1.9.4", - "@biomejs/cli-darwin-x64": "1.9.4", - "@biomejs/cli-linux-arm64": "1.9.4", - "@biomejs/cli-linux-arm64-musl": "1.9.4", - "@biomejs/cli-linux-x64": "1.9.4", - "@biomejs/cli-linux-x64-musl": "1.9.4", - "@biomejs/cli-win32-arm64": "1.9.4", - "@biomejs/cli-win32-x64": "1.9.4" + "@biomejs/cli-darwin-arm64": "2.4.10", + "@biomejs/cli-darwin-x64": "2.4.10", + "@biomejs/cli-linux-arm64": "2.4.10", + "@biomejs/cli-linux-arm64-musl": "2.4.10", + "@biomejs/cli-linux-x64": "2.4.10", + "@biomejs/cli-linux-x64-musl": "2.4.10", + "@biomejs/cli-win32-arm64": "2.4.10", + "@biomejs/cli-win32-x64": "2.4.10" } }, "node_modules/@biomejs/cli-darwin-arm64": { - "version": "1.9.4", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.4.10.tgz", + "integrity": "sha512-vuzzI1cWqDVzOMIkYyHbKqp+AkQq4K7k+UCXWpkYcY/HDn1UxdsbsfgtVpa40shem8Kax4TLDLlx8kMAecgqiw==", "cpu": [ "arm64" ], @@ -318,9 +321,9 @@ } }, "node_modules/@biomejs/cli-darwin-x64": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-1.9.4.tgz", - "integrity": "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg==", + "version": "2.4.10", + "resolved": 
"https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.4.10.tgz", + "integrity": "sha512-14fzASRo+BPotwp7nWULy2W5xeUyFnTaq1V13Etrrxkrih+ez/2QfgFm5Ehtf5vSjtgx/IJycMMpn5kPd5ZNaA==", "cpu": [ "x64" ], @@ -335,9 +338,9 @@ } }, "node_modules/@biomejs/cli-linux-arm64": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-1.9.4.tgz", - "integrity": "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g==", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.4.10.tgz", + "integrity": "sha512-7MH1CMW5uuxQ/s7FLST63qF8B3Hgu2HRdZ7tA1X1+mk+St4JOuIrqdhIBnnyqeyWJNI+Bww7Es5QZ0wIc1Cmkw==", "cpu": [ "arm64" ], @@ -352,9 +355,9 @@ } }, "node_modules/@biomejs/cli-linux-arm64-musl": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.9.4.tgz", - "integrity": "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA==", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.4.10.tgz", + "integrity": "sha512-WrJY6UuiSD/Dh+nwK2qOTu8kdMDlLV3dLMmychIghHPAysWFq1/DGC1pVZx8POE3ZkzKR3PUUnVrtZfMfaJjyQ==", "cpu": [ "arm64" ], @@ -369,9 +372,9 @@ } }, "node_modules/@biomejs/cli-linux-x64": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-1.9.4.tgz", - "integrity": "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg==", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.4.10.tgz", + "integrity": "sha512-tZLvEEi2u9Xu1zAqRjTcpIDGVtldigVvzug2fTuPG0ME/g8/mXpRPcNgLB22bGn6FvLJpHHnqLnwliOu8xjYrg==", "cpu": [ "x64" ], @@ -386,9 +389,9 @@ } }, "node_modules/@biomejs/cli-linux-x64-musl": { - "version": "1.9.4", - 
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-1.9.4.tgz", - "integrity": "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg==", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.4.10.tgz", + "integrity": "sha512-kDTi3pI6PBN6CiczsWYOyP2zk0IJI08EWEQyDMQWW221rPaaEz6FvjLhnU07KMzLv8q3qSuoB93ua6inSQ55Tw==", "cpu": [ "x64" ], @@ -403,9 +406,9 @@ } }, "node_modules/@biomejs/cli-win32-arm64": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-1.9.4.tgz", - "integrity": "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg==", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.4.10.tgz", + "integrity": "sha512-umwQU6qPzH+ISTf/eHyJ/QoQnJs3V9Vpjz2OjZXe9MVBZ7prgGafMy7yYeRGnlmDAn87AKTF3Q6weLoMGpeqdQ==", "cpu": [ "arm64" ], @@ -420,9 +423,9 @@ } }, "node_modules/@biomejs/cli-win32-x64": { - "version": "1.9.4", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-1.9.4.tgz", - "integrity": "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA==", + "version": "2.4.10", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.4.10.tgz", + "integrity": "sha512-aW/JU5GuyH4uxMrNYpoC2kjaHlyJGLgIa3XkhPEZI0uKhZhJZU8BuEyJmvgzSPQNGozBwWjC972RaNdcJ9KyJg==", "cpu": [ "x64" ], diff --git a/package.json b/package.json index 3ebfcfa1..5aad896c 100644 --- a/package.json +++ b/package.json @@ -86,7 +86,7 @@ "zod": "^3.25.0" }, "devDependencies": { - "@biomejs/biome": "^1.9.4", + "@biomejs/biome": "^2.4.10", "@commitlint/cli": "^20.1.0", "@commitlint/config-conventional": "^20.0.0", "@testing-library/dom": "^10.4.1", diff --git a/src/agents/capabilities/resolver.ts 
b/src/agents/capabilities/resolver.ts index 077907c7..c22e34a2 100644 --- a/src/agents/capabilities/resolver.ts +++ b/src/agents/capabilities/resolver.ts @@ -8,12 +8,6 @@ import { AstGrep } from '../../gadgets/AstGrep.js'; import { FileMultiEdit } from '../../gadgets/FileMultiEdit.js'; import { FileSearchAndReplace } from '../../gadgets/FileSearchAndReplace.js'; import { Finish } from '../../gadgets/Finish.js'; -import { ListDirectory } from '../../gadgets/ListDirectory.js'; -import { ReadFile } from '../../gadgets/ReadFile.js'; -import { RipGrep } from '../../gadgets/RipGrep.js'; -import { Sleep } from '../../gadgets/Sleep.js'; -import { VerifyChanges } from '../../gadgets/VerifyChanges.js'; -import { WriteFile } from '../../gadgets/WriteFile.js'; import { CreatePR, CreatePRReview, @@ -26,6 +20,7 @@ import { ReplyToReviewComment, UpdatePRComment, } from '../../gadgets/github/index.js'; +import { ListDirectory } from '../../gadgets/ListDirectory.js'; import { AddChecklist, CreateWorkItem, @@ -37,6 +32,9 @@ import { ReadWorkItem, UpdateWorkItem, } from '../../gadgets/pm/index.js'; +import { ReadFile } from '../../gadgets/ReadFile.js'; +import { RipGrep } from '../../gadgets/RipGrep.js'; +import { Sleep } from '../../gadgets/Sleep.js'; import { GetAlertingEventDetail, GetAlertingIssue, @@ -44,6 +42,8 @@ import { } from '../../gadgets/sentry/index.js'; import { Tmux } from '../../gadgets/tmux.js'; import { TodoDelete, TodoUpdateStatus, TodoUpsert } from '../../gadgets/todo/index.js'; +import { VerifyChanges } from '../../gadgets/VerifyChanges.js'; +import { WriteFile } from '../../gadgets/WriteFile.js'; import type { ToolManifest } from '../contracts/index.js'; import type { IntegrationCategory } from '../definitions/schema.js'; import { diff --git a/src/agents/definitions/contextSteps.ts b/src/agents/definitions/contextSteps.ts index 4f11ba94..f60ce477 100644 --- a/src/agents/definitions/contextSteps.ts +++ b/src/agents/definitions/contextSteps.ts @@ -6,21 +6,20 @@ 
*/ import { execFileSync } from 'node:child_process'; - -import { ListDirectory } from '../../gadgets/ListDirectory.js'; import { formatCheckStatus } from '../../gadgets/github/core/getPRChecks.js'; +import { ListDirectory } from '../../gadgets/ListDirectory.js'; import { readWorkItem, readWorkItemWithMedia } from '../../gadgets/pm/core/readWorkItem.js'; import { formatSentryEvent } from '../../gadgets/sentry/core/format.js'; +import type { Todo } from '../../gadgets/todo/storage.js'; import { formatTodoList, getNextId, initTodoSession, saveTodos, } from '../../gadgets/todo/storage.js'; -import type { Todo } from '../../gadgets/todo/storage.js'; import { githubClient } from '../../github/client.js'; import { getJiraConfig, getTrelloConfig } from '../../pm/config.js'; -import { MAX_IMAGES_PER_WORK_ITEM, getPMProviderOrNull } from '../../pm/index.js'; +import { getPMProviderOrNull, MAX_IMAGES_PER_WORK_ITEM } from '../../pm/index.js'; import { getSentryClient } from '../../sentry/client.js'; import type { AgentInput, ProjectConfig } from '../../types/index.js'; import { parseRepoFullName } from '../../utils/repo.js'; diff --git a/src/agents/definitions/index.ts b/src/agents/definitions/index.ts index a339415b..2e6ea670 100644 --- a/src/agents/definitions/index.ts +++ b/src/agents/definitions/index.ts @@ -1,44 +1,43 @@ -export { - AgentDefinitionSchema, - IntegrationHooksSchema, - type AgentDefinition, - type AgentCapabilities, - type IntegrationHooks, - type TrailingHookFlags, - type FinishHookFlags, -} from './schema.js'; -export { - loadAgentDefinition, - loadAllAgentDefinitions, - getKnownAgentTypes, - clearDefinitionCache, - resolveAgentDefinition, - resolveAllAgentDefinitions, - resolveKnownAgentTypes, - invalidateDefinitionCache, - isPMFocusedAgent, -} from './loader.js'; -export { CONTEXT_STEP_REGISTRY } from './strategies.js'; -export type { FetchContextParams } from './contextSteps.js'; -export type { AgentProfile } from './profiles.js'; -export { 
getAgentProfile, getAgentCapabilities, needsGitStateStopHooks } from './profiles.js'; -export { getToolManifests } from './toolManifests.js'; - // Re-export capability system export { + buildGadgetsFromCapabilities, CAPABILITIES, CAPABILITY_REGISTRY, type Capability, type CapabilityDefinition, - getCapabilitiesByIntegration, - getCapabilityIntegration, - isBuiltInCapability, - isValidCapability, - buildGadgetsFromCapabilities, deriveIntegrations, deriveRequiredIntegrations, filterToolManifests, + getCapabilitiesByIntegration, + getCapabilityIntegration, getGadgetNamesFromCapabilities, getSdkToolsFromCapabilities, + isBuiltInCapability, + isValidCapability, resolveEffectiveCapabilities, } from '../capabilities/index.js'; +export type { FetchContextParams } from './contextSteps.js'; +export { + clearDefinitionCache, + getKnownAgentTypes, + invalidateDefinitionCache, + isPMFocusedAgent, + loadAgentDefinition, + loadAllAgentDefinitions, + resolveAgentDefinition, + resolveAllAgentDefinitions, + resolveKnownAgentTypes, +} from './loader.js'; +export type { AgentProfile } from './profiles.js'; +export { getAgentCapabilities, getAgentProfile, needsGitStateStopHooks } from './profiles.js'; +export { + type AgentCapabilities, + type AgentDefinition, + AgentDefinitionSchema, + type FinishHookFlags, + type IntegrationHooks, + IntegrationHooksSchema, + type TrailingHookFlags, +} from './schema.js'; +export { CONTEXT_STEP_REGISTRY } from './strategies.js'; +export { getToolManifests } from './toolManifests.js'; diff --git a/src/agents/definitions/loader.ts b/src/agents/definitions/loader.ts index 69de9b46..dabeafff 100644 --- a/src/agents/definitions/loader.ts +++ b/src/agents/definitions/loader.ts @@ -1,4 +1,4 @@ -import { readFileSync, readdirSync } from 'node:fs'; +import { readdirSync, readFileSync } from 'node:fs'; import { dirname, join } from 'node:path'; import { fileURLToPath } from 'node:url'; import yaml from 'js-yaml'; diff --git 
a/src/agents/definitions/strategies.ts b/src/agents/definitions/strategies.ts index 14276d89..dc4d4304 100644 --- a/src/agents/definitions/strategies.ts +++ b/src/agents/definitions/strategies.ts @@ -14,9 +14,9 @@ import { fetchAlertingIssueStep, fetchContextFilesStep, fetchDirectoryListingStep, + fetchPipelineSnapshotStep, fetchPRContextStep, fetchPRConversationStep, - fetchPipelineSnapshotStep, fetchSquintStep, fetchWorkItemStep, prepopulateTodosStep, diff --git a/src/agents/prompts/index.ts b/src/agents/prompts/index.ts index 36b1f3d7..0c3cc73e 100644 --- a/src/agents/prompts/index.ts +++ b/src/agents/prompts/index.ts @@ -1,4 +1,4 @@ -import { readFileSync, readdirSync } from 'node:fs'; +import { readdirSync, readFileSync } from 'node:fs'; import { dirname, join } from 'node:path'; import { fileURLToPath } from 'node:url'; import { Eta } from 'eta'; diff --git a/src/agents/shared/builderFactory.ts b/src/agents/shared/builderFactory.ts index ebe9a5b7..a214c39c 100644 --- a/src/agents/shared/builderFactory.ts +++ b/src/agents/shared/builderFactory.ts @@ -2,15 +2,15 @@ import { execSync } from 'node:child_process'; import { AgentBuilder, BudgetPricingUnavailableError, - type LLMist, type createLogger, + type LLMist, } from 'llmist'; import { getCompactionConfig } from '../../config/compactionConfig.js'; import { getIterationTrailingMessage } from '../../config/hintConfig.js'; import { getRateLimitForModel } from '../../config/rateLimits.js'; import { getRetryConfig } from '../../config/retryConfig.js'; -import { type SessionHooks, initSessionState, setReadOnlyFs } from '../../gadgets/sessionState.js'; +import { initSessionState, type SessionHooks, setReadOnlyFs } from '../../gadgets/sessionState.js'; import type { LLMCallLogger } from '../../utils/llmLogging.js'; import { resolveSquintDbPath } from '../../utils/squintDb.js'; import type { IProgressMonitor } from '../contracts/index.js'; diff --git a/src/agents/shared/capabilities.ts 
b/src/agents/shared/capabilities.ts index b620b624..f1cab599 100644 --- a/src/agents/shared/capabilities.ts +++ b/src/agents/shared/capabilities.ts @@ -8,30 +8,26 @@ * - '../definitions/schema.js' for AgentCapabilities type */ -// Re-export capability types -export type { Capability, AgentCapabilities } from '../definitions/schema.js'; - // Re-export capability functions export { + buildGadgetsFromCapabilities, CAPABILITIES, CAPABILITY_REGISTRY, - getCapabilitiesByIntegration, - getCapabilityIntegration, - isBuiltInCapability, - isValidCapability, -} from '../capabilities/index.js'; - -export { - buildGadgetsFromCapabilities, deriveIntegrations, deriveRequiredIntegrations, filterToolManifests, generateUnavailableCapabilitiesNote, + getCapabilitiesByIntegration, + getCapabilityIntegration, getGadgetNamesFromCapabilities, getSdkToolsFromCapabilities, getUnavailableOptionalCapabilities, + isBuiltInCapability, + isValidCapability, resolveEffectiveCapabilities, } from '../capabilities/index.js'; +// Re-export capability types +export type { AgentCapabilities, Capability } from '../definitions/schema.js'; import { resolveAgentDefinition } from '../definitions/index.js'; diff --git a/src/agents/shared/executionPipeline.ts b/src/agents/shared/executionPipeline.ts index 7f423908..171ed04c 100644 --- a/src/agents/shared/executionPipeline.ts +++ b/src/agents/shared/executionPipeline.ts @@ -15,6 +15,7 @@ export type AgentLogger = ReturnType; // Re-export from the canonical single definition in contracts. 
export type { LogWriter } from '../contracts/index.js'; + import type { LogWriter } from '../contracts/index.js'; /** diff --git a/src/agents/shared/modelResolution.ts b/src/agents/shared/modelResolution.ts index 47a4f0e5..28d1f149 100644 --- a/src/agents/shared/modelResolution.ts +++ b/src/agents/shared/modelResolution.ts @@ -3,9 +3,9 @@ import type { AgentInput, CascadeConfig, ProjectConfig } from '../../types/index import { logger } from '../../utils/logging.js'; import { resolveAgentDefinition } from '../definitions/loader.js'; import { - type PromptContext, buildTaskPromptContext, getSystemPrompt, + type PromptContext, renderCustomPrompt, renderInlineTaskPrompt, } from '../prompts/index.js'; diff --git a/src/agents/shared/prFormatting.ts b/src/agents/shared/prFormatting.ts index e0b1053b..a9c4eb67 100644 --- a/src/agents/shared/prFormatting.ts +++ b/src/agents/shared/prFormatting.ts @@ -1,7 +1,7 @@ import { readFile } from 'node:fs/promises'; import { join } from 'node:path'; -import { REVIEW_FILE_CONTENT_TOKEN_LIMIT, estimateTokens } from '../../config/reviewConfig.js'; +import { estimateTokens, REVIEW_FILE_CONTENT_TOKEN_LIMIT } from '../../config/reviewConfig.js'; import type { githubClient } from '../../github/client.js'; type PRDetails = Awaited>; @@ -10,7 +10,7 @@ type PRComments = Awaited>; type PRReviews = Awaited>; type PRIssueComments = Awaited>; -export type { PRDetails, PRDiff, PRComments, PRReviews, PRIssueComments }; +export type { PRComments, PRDetails, PRDiff, PRIssueComments, PRReviews }; export function formatPRDetails(prDetails: PRDetails): string { return [ diff --git a/src/agents/shared/syntheticCalls.ts b/src/agents/shared/syntheticCalls.ts index d33163ba..8fb47f66 100644 --- a/src/agents/shared/syntheticCalls.ts +++ b/src/agents/shared/syntheticCalls.ts @@ -2,7 +2,7 @@ import { imageFromBase64, text } from 'llmist'; import { logger } from '../../utils/logging.js'; import type { ContextImage } from '../contracts/index.js'; -import { type 
TrackingContext, recordSyntheticInvocationId } from '../utils/tracking.js'; +import { recordSyntheticInvocationId, type TrackingContext } from '../utils/tracking.js'; import type { BuilderType } from './builderFactory.js'; /** MIME types supported by the llmist SDK for image content parts. */ diff --git a/src/agents/utils/agentLoop.ts b/src/agents/utils/agentLoop.ts index 68959192..8a23eba6 100644 --- a/src/agents/utils/agentLoop.ts +++ b/src/agents/utils/agentLoop.ts @@ -14,12 +14,12 @@ import { } from '../../utils/interactive.js'; import type { createAgentLogger } from './logging.js'; import { - type TrackingContext, consumeLoopAction, consumeLoopWarning, incrementGadgetCall, isSyntheticCall, recordGadgetCallForLoop, + type TrackingContext, } from './tracking.js'; // ============================================================================ diff --git a/src/agents/utils/checklistSync.ts b/src/agents/utils/checklistSync.ts index 93ce43b6..f4571094 100644 --- a/src/agents/utils/checklistSync.ts +++ b/src/agents/utils/checklistSync.ts @@ -5,7 +5,7 @@ * items as local TODOs are completed. 
*/ -import { type Todo, loadTodos } from '../../gadgets/todo/storage.js'; +import { loadTodos, type Todo } from '../../gadgets/todo/storage.js'; import { type ChecklistItem, getPMProvider } from '../../pm/index.js'; import type { Checklist } from '../../pm/types.js'; import { logger } from '../../utils/logging.js'; diff --git a/src/agents/utils/hooks.ts b/src/agents/utils/hooks.ts index 33c0e0a8..34a52f68 100644 --- a/src/agents/utils/hooks.ts +++ b/src/agents/utils/hooks.ts @@ -19,7 +19,7 @@ import type { LLMCallLogger } from '../../utils/llmLogging.js'; import { calculateCost } from '../../utils/llmMetrics.js'; import { logger } from '../../utils/logging.js'; import type { IProgressMonitor, LogWriter } from '../contracts/index.js'; -import { type TrackingContext, checkForLoopAndAdvance, incrementLLMIteration } from './tracking.js'; +import { checkForLoopAndAdvance, incrementLLMIteration, type TrackingContext } from './tracking.js'; // Re-export LogWriter for downstream consumers that imported it from here. 
export type { LogWriter } from '../contracts/index.js'; diff --git a/src/agents/utils/index.ts b/src/agents/utils/index.ts index ce74315f..cb3e1fc9 100644 --- a/src/agents/utils/index.ts +++ b/src/agents/utils/index.ts @@ -1,14 +1,13 @@ +export { type AgentRunResult, runAgentLoop, truncateContent } from './agentLoop.js'; + +export { type AgentLogger, createAgentLogger } from './logging.js'; export { - LOG_LEVELS, - getLogLevel, type ContextFile, - readContextFiles, type DependencyInstallResult, + getLogLevel, installDependencies, + LOG_LEVELS, + readContextFiles, type TypeScriptWarmResult, warmTypeScriptCache, } from './setup.js'; - -export { type AgentLogger, createAgentLogger } from './logging.js'; - -export { type AgentRunResult, runAgentLoop, truncateContent } from './agentLoop.js'; diff --git a/src/api/routers/runs.ts b/src/api/routers/runs.ts index d7a29179..e0f85182 100644 --- a/src/api/routers/runs.ts +++ b/src/api/routers/runs.ts @@ -3,8 +3,8 @@ import { z } from 'zod'; import { loadProjectConfigById } from '../../config/provider.js'; import { isAgentEnabledForProject } from '../../db/repositories/agentConfigsRepository.js'; import { - DEFAULT_STALE_RUN_THRESHOLD_MS, cancelRunById, + DEFAULT_STALE_RUN_THRESHOLD_MS, deleteDebugAnalysisByRunId, getDebugAnalysisByRunId, getLlmCallByNumber, diff --git a/src/api/trpc.ts b/src/api/trpc.ts index 698addb1..47cd077a 100644 --- a/src/api/trpc.ts +++ b/src/api/trpc.ts @@ -1,4 +1,4 @@ -import { TRPCError, initTRPC } from '@trpc/server'; +import { initTRPC, TRPCError } from '@trpc/server'; export interface TRPCUser { id: string; diff --git a/src/backends/adapter.ts b/src/backends/adapter.ts index 68e7d0bc..20c8a8a4 100644 --- a/src/backends/adapter.ts +++ b/src/backends/adapter.ts @@ -1,5 +1,5 @@ import { getAgentProfile } from '../agents/definitions/profiles.js'; -import { type PipelineContext, executeAgentPipeline } from '../agents/shared/executionPipeline.js'; +import { executeAgentPipeline, type PipelineContext } 
from '../agents/shared/executionPipeline.js'; import { setupRepository } from '../agents/shared/repository.js'; import { finalizeEngineRun, tryCreateRun } from '../agents/shared/runTracking.js'; import { createAgentLogger } from '../agents/utils/logging.js'; diff --git a/src/backends/claude-code/index.ts b/src/backends/claude-code/index.ts index 71c717e7..64bf1600 100644 --- a/src/backends/claude-code/index.ts +++ b/src/backends/claude-code/index.ts @@ -1,15 +1,15 @@ -import { constants, accessSync, existsSync, readdirSync, statSync, writeFileSync } from 'node:fs'; +import { accessSync, constants, existsSync, readdirSync, statSync, writeFileSync } from 'node:fs'; import { rm } from 'node:fs/promises'; import { homedir } from 'node:os'; import path from 'node:path'; -import { query } from '@anthropic-ai/claude-agent-sdk'; import type { SDKUserMessage } from '@anthropic-ai/claude-agent-sdk'; +import { query } from '@anthropic-ai/claude-agent-sdk'; import { getEngineSettings } from '../../config/engineSettings.js'; import { logger } from '../../utils/logging.js'; import { getWorkspaceDir } from '../../utils/repo.js'; import { CLAUDE_CODE_ENGINE_DEFINITION } from '../catalog.js'; -import { NativeToolEngine } from '../shared/NativeToolEngine.js'; import { runContinuationLoop } from '../shared/continuationLoop.js'; +import { NativeToolEngine } from '../shared/NativeToolEngine.js'; import { buildSystemPrompt, buildTaskPrompt } from '../shared/nativeToolPrompts.js'; import type { AgentEngineResult, AgentExecutionPlan } from '../types.js'; import { ALLOWED_ENV_EXACT, buildClaudeEnv } from './env.js'; @@ -24,9 +24,9 @@ import { CLAUDE_CODE_MODEL_IDS, DEFAULT_CLAUDE_CODE_MODEL } from './models.js'; import { ClaudeCodeSettingsSchema, resolveClaudeCodeSettings } from './settings.js'; export { - buildToolGuidance, - buildTaskPrompt, buildSystemPrompt, + buildTaskPrompt, + buildToolGuidance, } from '../shared/nativeToolPrompts.js'; export { buildClaudeEnv as buildEnv } from 
'./env.js'; export { buildPromptWithImages, formatErrorMessage } from './messageProcessing.js'; diff --git a/src/backends/claude-code/messageProcessing.ts b/src/backends/claude-code/messageProcessing.ts index fc891752..75d65e4d 100644 --- a/src/backends/claude-code/messageProcessing.ts +++ b/src/backends/claude-code/messageProcessing.ts @@ -1,5 +1,6 @@ import { randomUUID } from 'node:crypto'; import type { + query, SDKAssistantMessage, SDKResultMessage, SDKResultSuccess, @@ -7,7 +8,6 @@ import type { SDKSystemMessage, SDKUserMessage, } from '@anthropic-ai/claude-agent-sdk'; -import type { query } from '@anthropic-ai/claude-agent-sdk'; import { calculateCost } from '../../utils/llmMetrics.js'; import { extractPRUrl } from '../../utils/prUrl.js'; import { buildEngineResult } from '../shared/engineResult.js'; diff --git a/src/backends/codex/index.ts b/src/backends/codex/index.ts index 5a9c1d81..138de946 100644 --- a/src/backends/codex/index.ts +++ b/src/backends/codex/index.ts @@ -7,20 +7,20 @@ import { createInterface } from 'node:readline'; import { writeProjectCredential } from '../../db/repositories/credentialsRepository.js'; import { CODEX_ENGINE_DEFINITION } from '../catalog.js'; -import { NativeToolEngine } from '../shared/NativeToolEngine.js'; import { cleanupContextFiles } from '../shared/contextFiles.js'; import { appendEngineLog } from '../shared/engineLog.js'; import { buildEngineResult, extractAndBuildPrEvidence } from '../shared/engineResult.js'; import { SHARED_ALLOWED_ENV_EXACT } from '../shared/envFilter.js'; import { logLlmCall } from '../shared/llmCallLogger.js'; +import { NativeToolEngine } from '../shared/NativeToolEngine.js'; import { buildSystemPrompt, buildTaskPrompt } from '../shared/nativeToolPrompts.js'; import type { AgentEngineResult, AgentExecutionPlan, LogWriter } from '../types.js'; -import { extractUsage, parseCodexEvent } from './jsonlParser.js'; import type { UsageSummary } from './jsonlParser.js'; +import { extractUsage, 
parseCodexEvent } from './jsonlParser.js'; import { CODEX_MODEL_IDS, DEFAULT_CODEX_MODEL } from './models.js'; import { - CodexSettingsSchema, assertHeadlessCodexSettings, + CodexSettingsSchema, resolveCodexSettings, } from './settings.js'; @@ -616,10 +616,10 @@ export class CodexEngine extends NativeToolEngine { } } -export { resolveCodexModel }; export { extractErrorMessage, - extractToolCall, extractTextParts, + extractToolCall, extractUsage, } from './jsonlParser.js'; +export { resolveCodexModel }; diff --git a/src/backends/index.ts b/src/backends/index.ts index 0349ebac..88bfdc04 100644 --- a/src/backends/index.ts +++ b/src/backends/index.ts @@ -1,3 +1,20 @@ +export { executeWithEngine } from './adapter.js'; +export { registerBuiltInEngines } from './bootstrap.js'; +export { ClaudeCodeEngine } from './claude-code/index.js'; +export { CodexEngine } from './codex/index.js'; +export { LlmistEngine } from './llmist/index.js'; +export { OpenCodeEngine } from './opencode/index.js'; +export { createProgressMonitor, ProgressMonitor } from './progress.js'; +export { + getEngine, + getEngineCatalog, + getRegisteredEngines, + isNativeToolEngine, + isNativeToolEngineDefinition, + registerEngine, +} from './registry.js'; +export { resolveEngineName } from './resolution.js'; +export { NativeToolEngine } from './shared/index.js'; export type { AgentEngine, AgentEngineDefinition, @@ -10,21 +27,3 @@ export type { ProgressReporter, ToolManifest, } from './types.js'; -export { NativeToolEngine } from './shared/index.js'; - -export { - getEngine, - getEngineCatalog, - getRegisteredEngines, - isNativeToolEngine, - isNativeToolEngineDefinition, - registerEngine, -} from './registry.js'; -export { registerBuiltInEngines } from './bootstrap.js'; -export { resolveEngineName } from './resolution.js'; -export { executeWithEngine } from './adapter.js'; -export { createProgressMonitor, ProgressMonitor } from './progress.js'; -export { LlmistEngine } from './llmist/index.js'; -export { 
ClaudeCodeEngine } from './claude-code/index.js'; -export { CodexEngine } from './codex/index.js'; -export { OpenCodeEngine } from './opencode/index.js'; diff --git a/src/backends/llmist/index.ts b/src/backends/llmist/index.ts index ab9976bd..8db7ab48 100644 --- a/src/backends/llmist/index.ts +++ b/src/backends/llmist/index.ts @@ -1,6 +1,6 @@ import os from 'node:os'; -import { LLMist, type ModelSpec, createLogger } from 'llmist'; +import { createLogger, LLMist, type ModelSpec } from 'llmist'; import { createIntegrationChecker } from '../../agents/capabilities/index.js'; import { getAgentProfile } from '../../agents/definitions/profiles.js'; diff --git a/src/backends/opencode/index.ts b/src/backends/opencode/index.ts index 6c9c2b91..16c911eb 100644 --- a/src/backends/opencode/index.ts +++ b/src/backends/opencode/index.ts @@ -1,5 +1,5 @@ -import { createOpencodeClient } from '@opencode-ai/sdk/client'; import type { AssistantMessage, Config, Part } from '@opencode-ai/sdk/client'; +import { createOpencodeClient } from '@opencode-ai/sdk/client'; import { logger } from '../../utils/logging.js'; import { OPENCODE_ENGINE_DEFINITION } from '../catalog.js'; @@ -8,23 +8,23 @@ import { isRetryableNativeToolError, retryNativeToolOperation, } from '../nativeToolRetry.js'; -import { NativeToolEngine } from '../shared/NativeToolEngine.js'; import { cleanupContextFiles } from '../shared/contextFiles.js'; import { runContinuationLoop } from '../shared/continuationLoop.js'; import { buildEngineResult, extractAndBuildPrEvidence } from '../shared/engineResult.js'; import { SHARED_ALLOWED_ENV_EXACT } from '../shared/envFilter.js'; +import { NativeToolEngine } from '../shared/NativeToolEngine.js'; import { buildSystemPrompt, buildTaskPrompt } from '../shared/nativeToolPrompts.js'; import type { AgentEngineResult, AgentExecutionPlan } from '../types.js'; import { DEFAULT_OPENCODE_MODEL } from './models.js'; import { buildPermissionConfig } from './permissions.js'; import { - type 
OpenCodeServerState, attachServerState, formatOpenCodeServerExitError, + type OpenCodeServerState, startOpenCodeServer, } from './server.js'; import { OpenCodeSettingsSchema, resolveOpenCodeSettings } from './settings.js'; -import { type OpenCodeStreamState, getPartialOutput, processStreamEvent } from './stream.js'; +import { getPartialOutput, type OpenCodeStreamState, processStreamEvent } from './stream.js'; export function resolveOpenCodeModel(cascadeModel: string): string { if (cascadeModel.includes('/') && !cascadeModel.includes(':')) return cascadeModel; diff --git a/src/backends/opencode/server.ts b/src/backends/opencode/server.ts index 5f2f46ac..e212647a 100644 --- a/src/backends/opencode/server.ts +++ b/src/backends/opencode/server.ts @@ -6,7 +6,7 @@ */ import { spawn } from 'node:child_process'; -import { type Server, createServer } from 'node:net'; +import { createServer, type Server } from 'node:net'; import type { Config } from '@opencode-ai/sdk/client'; diff --git a/src/backends/opencode/stream.ts b/src/backends/opencode/stream.ts index 5a374cd6..e1282b02 100644 --- a/src/backends/opencode/stream.ts +++ b/src/backends/opencode/stream.ts @@ -5,16 +5,15 @@ * permission requests, session lifecycle events, and message part updates. 
*/ -import type { createOpencodeClient } from '@opencode-ai/sdk/client'; -import type { Event, Part, ToolPart } from '@opencode-ai/sdk/client'; +import type { createOpencodeClient, Event, Part, ToolPart } from '@opencode-ai/sdk/client'; import { retryNativeToolOperation } from '../nativeToolRetry.js'; import { appendEngineLog } from '../shared/engineLog.js'; import { logLlmCall } from '../shared/llmCallLogger.js'; import type { AgentExecutionPlan } from '../types.js'; import { - type OpenCodePermissionConfig, normalizePermissionDecision, + type OpenCodePermissionConfig, resolvePermissionDecision, } from './permissions.js'; diff --git a/src/backends/progressMonitor.ts b/src/backends/progressMonitor.ts index dfd1f247..2ec8e67a 100644 --- a/src/backends/progressMonitor.ts +++ b/src/backends/progressMonitor.ts @@ -23,11 +23,11 @@ import { formatStatusMessage } from '../config/statusUpdateConfig.js'; import { captureException } from '../sentry.js'; import { buildRunLink, buildWorkItemRunsLink, getDashboardUrl } from '../utils/runLink.js'; import { callProgressModel } from './progressModel.js'; -import { clearProgressCommentId, writeProgressCommentId } from './progressState.js'; import { ProgressAccumulator } from './progressState/accumulator.js'; import { GitHubProgressPoster } from './progressState/githubPoster.js'; import { PMProgressPoster } from './progressState/pmPoster.js'; import { DEFAULT_SCHEDULE_MINUTES, ProgressScheduler } from './progressState/scheduler.js'; +import { clearProgressCommentId, writeProgressCommentId } from './progressState.js'; import type { LogWriter, ProgressReporter } from './types.js'; export interface ProgressMonitorConfig { diff --git a/src/backends/shared/continuationLoop.ts b/src/backends/shared/continuationLoop.ts index 02b39748..91ab8254 100644 --- a/src/backends/shared/continuationLoop.ts +++ b/src/backends/shared/continuationLoop.ts @@ -13,8 +13,8 @@ */ import { - type CompletionRequirements, applyCompletionEvidence, + type 
CompletionRequirements, getCompletionFailure, readCompletionEvidence, } from '../completion.js'; diff --git a/src/backends/shared/envBuilder.ts b/src/backends/shared/envBuilder.ts index cf2e7456..d889520d 100644 --- a/src/backends/shared/envBuilder.ts +++ b/src/backends/shared/envBuilder.ts @@ -10,10 +10,10 @@ import { buildNativeToolPath } from '../nativeToolRuntime.js'; import { + filterProcessEnv, SHARED_ALLOWED_ENV_EXACT, SHARED_ALLOWED_ENV_PREFIXES, SHARED_BLOCKED_ENV_EXACT, - filterProcessEnv, } from './envFilter.js'; export interface BuildEngineEnvOptions { diff --git a/src/backends/sidecarManager.ts b/src/backends/sidecarManager.ts index 9f2ede8b..3fd21798 100644 --- a/src/backends/sidecarManager.ts +++ b/src/backends/sidecarManager.ts @@ -4,11 +4,11 @@ import { join } from 'node:path'; import type { getAgentProfile } from '../agents/definitions/profiles.js'; import { + clearInitialComment, PM_WRITE_SIDECAR_ENV_VAR, PR_SIDECAR_ENV_VAR, PUSHED_CHANGES_SIDECAR_ENV_VAR, REVIEW_SIDECAR_ENV_VAR, - clearInitialComment, recordPRCreation, recordReviewSubmission, } from '../gadgets/sessionState.js'; diff --git a/src/cli/dashboard/_shared/base.ts b/src/cli/dashboard/_shared/base.ts index 8e937e81..76b7bd5d 100644 --- a/src/cli/dashboard/_shared/base.ts +++ b/src/cli/dashboard/_shared/base.ts @@ -1,7 +1,7 @@ import { Command, Flags } from '@oclif/core'; import { TRPCClientError } from '@trpc/client'; import chalk from 'chalk'; -import { type DashboardClient, createDashboardClient } from './client.js'; +import { createDashboardClient, type DashboardClient } from './client.js'; import { type CliConfig, loadConfig } from './config.js'; import { formatActionableError, mapError } from './errors.js'; import { printCompact, printCsv, printDetail, printTable } from './format.js'; diff --git a/src/cli/pm/add-checklist.ts b/src/cli/pm/add-checklist.ts index 6ad06092..a872f6e0 100644 --- a/src/cli/pm/add-checklist.ts +++ b/src/cli/pm/add-checklist.ts @@ -1,4 +1,4 @@ -import { 
type ChecklistItemInput, addChecklist } from '../../gadgets/pm/core/addChecklist.js'; +import { addChecklist, type ChecklistItemInput } from '../../gadgets/pm/core/addChecklist.js'; import { addChecklistDef } from '../../gadgets/pm/definitions.js'; import { writePMWriteSidecar } from '../../gadgets/session/core/sidecar.js'; import { PM_WRITE_SIDECAR_ENV_VAR } from '../../gadgets/sessionState.js'; diff --git a/src/cli/session/finish.ts b/src/cli/session/finish.ts index afa57b3e..e72108bc 100644 --- a/src/cli/session/finish.ts +++ b/src/cli/session/finish.ts @@ -1,7 +1,6 @@ import { Command, Flags } from '@oclif/core'; import { readCompletionEvidence } from '../../backends/completion.js'; -import { validateFinish } from '../../gadgets/session/core/finish.js'; -import { writePushedChangesSidecar } from '../../gadgets/session/core/finish.js'; +import { validateFinish, writePushedChangesSidecar } from '../../gadgets/session/core/finish.js'; import { finishDef } from '../../gadgets/session/definitions.js'; import type { SessionHooks } from '../../gadgets/sessionState.js'; import { diff --git a/src/config/retryConfig.ts b/src/config/retryConfig.ts index 418ef331..1acaca30 100644 --- a/src/config/retryConfig.ts +++ b/src/config/retryConfig.ts @@ -1,5 +1,5 @@ -import { type RetryConfig, isRetryableError } from 'llmist'; import type { ILogObj, Logger } from 'llmist'; +import { isRetryableError, type RetryConfig } from 'llmist'; import { addBreadcrumb, captureException } from '../sentry.js'; /** diff --git a/src/db/repositories/agentDefinitionsRepository.ts b/src/db/repositories/agentDefinitionsRepository.ts index b08555fa..5a101908 100644 --- a/src/db/repositories/agentDefinitionsRepository.ts +++ b/src/db/repositories/agentDefinitionsRepository.ts @@ -1,6 +1,6 @@ import { eq } from 'drizzle-orm'; -import { AgentDefinitionSchema } from '../../agents/definitions/schema.js'; import type { AgentDefinition } from '../../agents/definitions/schema.js'; +import { 
AgentDefinitionSchema } from '../../agents/definitions/schema.js'; import { getDb } from '../client.js'; import { agentDefinitions } from '../schema/index.js'; diff --git a/src/db/repositories/configRepository.ts b/src/db/repositories/configRepository.ts index f2376450..558a6a04 100644 --- a/src/db/repositories/configRepository.ts +++ b/src/db/repositories/configRepository.ts @@ -1,12 +1,12 @@ -import { type SQL, eq, sql } from 'drizzle-orm'; +import { eq, type SQL, sql } from 'drizzle-orm'; import { validateConfig } from '../../config/schema.js'; import type { CascadeConfig, ProjectConfig } from '../../types/index.js'; import { getDb } from '../client.js'; import { agentConfigs, projectIntegrations, projects } from '../schema/index.js'; import { type AgentConfigRow, - type IntegrationRow, extractIntegrationConfigs, + type IntegrationRow, mapProjectRow, } from './configMapper.js'; diff --git a/src/db/repositories/joinHelpers.ts b/src/db/repositories/joinHelpers.ts index 86b32062..1703364b 100644 --- a/src/db/repositories/joinHelpers.ts +++ b/src/db/repositories/joinHelpers.ts @@ -1,4 +1,4 @@ -import { type SQL, and, eq, or, sql } from 'drizzle-orm'; +import { and, eq, or, type SQL, sql } from 'drizzle-orm'; import { agentRuns, prWorkItems } from '../schema/index.js'; /** diff --git a/src/db/repositories/prWorkItemsRepository.ts b/src/db/repositories/prWorkItemsRepository.ts index 623cd08a..822816be 100644 --- a/src/db/repositories/prWorkItemsRepository.ts +++ b/src/db/repositories/prWorkItemsRepository.ts @@ -1,5 +1,4 @@ import { - type SQL, and, countDistinct, desc, @@ -8,10 +7,11 @@ import { isNotNull, isNull, max, + type SQL, sum, } from 'drizzle-orm'; import { getDb } from '../client.js'; -import { agentRuns, prWorkItems, projects } from '../schema/index.js'; +import { agentRuns, projects, prWorkItems } from '../schema/index.js'; import { buildAgentRunWorkItemJoin } from './joinHelpers.js'; export interface LinkPRToWorkItemOptions { diff --git 
a/src/db/repositories/runStatsRepository.ts b/src/db/repositories/runStatsRepository.ts index 68279bfe..4d303f09 100644 --- a/src/db/repositories/runStatsRepository.ts +++ b/src/db/repositories/runStatsRepository.ts @@ -1,6 +1,6 @@ -import { type SQL, and, asc, count, desc, eq, gte, inArray, lte, sql } from 'drizzle-orm'; +import { and, asc, count, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'; import { getDb } from '../client.js'; -import { agentRuns, organizations, prWorkItems, projects } from '../schema/index.js'; +import { agentRuns, organizations, projects, prWorkItems } from '../schema/index.js'; import { buildAgentRunWorkItemJoin } from './joinHelpers.js'; // ============================================================================ diff --git a/src/db/repositories/runsRepository.ts b/src/db/repositories/runsRepository.ts index 18854df5..13144691 100644 --- a/src/db/repositories/runsRepository.ts +++ b/src/db/repositories/runsRepository.ts @@ -1,4 +1,4 @@ -import { type SQL, and, count, desc, eq, gte, isNull } from 'drizzle-orm'; +import { and, count, desc, eq, gte, isNull, type SQL } from 'drizzle-orm'; import { getDb } from '../client.js'; import { agentRuns, prWorkItems } from '../schema/index.js'; import { buildAgentRunWorkItemJoin } from './joinHelpers.js'; @@ -306,6 +306,13 @@ export async function cancelRunById(runId: string, reason: string): Promise { - output += data.toString(); + _output += data.toString(); }); sg.stderr.on('data', (data: Buffer) => { errorOutput += data.toString(); diff --git a/src/gadgets/Finish.ts b/src/gadgets/Finish.ts index 423a444a..d539461f 100644 --- a/src/gadgets/Finish.ts +++ b/src/gadgets/Finish.ts @@ -1,8 +1,7 @@ import { TaskCompletionSignal } from 'llmist'; import { validateFinish, writePushedChangesSidecar } from './session/core/finish.js'; import { finishDef } from './session/definitions.js'; -import { PUSHED_CHANGES_SIDECAR_ENV_VAR } from './sessionState.js'; -import { getSessionState } from 
'./sessionState.js'; +import { getSessionState, PUSHED_CHANGES_SIDECAR_ENV_VAR } from './sessionState.js'; import { createGadgetClass } from './shared/gadgetFactory.js'; export const Finish = createGadgetClass(finishDef, async (params) => { diff --git a/src/gadgets/ListDirectory.ts b/src/gadgets/ListDirectory.ts index 602522a7..1e01489b 100644 --- a/src/gadgets/ListDirectory.ts +++ b/src/gadgets/ListDirectory.ts @@ -5,7 +5,7 @@ * Use includeGitIgnored=true to include all files. */ import { execSync } from 'node:child_process'; -import { type Stats, readdirSync, statSync } from 'node:fs'; +import { readdirSync, type Stats, statSync } from 'node:fs'; import { join, relative } from 'node:path'; import { Gadget, z } from 'llmist'; diff --git a/src/gadgets/github/index.ts b/src/gadgets/github/index.ts index 25a055bf..0eaef1b0 100644 --- a/src/gadgets/github/index.ts +++ b/src/gadgets/github/index.ts @@ -1,13 +1,5 @@ export { CreatePR } from './CreatePR.js'; export { CreatePRReview } from './CreatePRReview.js'; -export { GetCIRunLogs } from './GetCIRunLogs.js'; -export { formatCheckStatus, GetPRChecks } from './GetPRChecks.js'; -export { GetPRComments } from './GetPRComments.js'; -export { GetPRDetails } from './GetPRDetails.js'; -export { GetPRDiff } from './GetPRDiff.js'; -export { PostPRComment } from './PostPRComment.js'; -export { ReplyToReviewComment } from './ReplyToReviewComment.js'; -export { UpdatePRComment } from './UpdatePRComment.js'; export { createPRDef, createPRReviewDef, @@ -20,3 +12,11 @@ export { replyToReviewCommentDef, updatePRCommentDef, } from './definitions.js'; +export { GetCIRunLogs } from './GetCIRunLogs.js'; +export { formatCheckStatus, GetPRChecks } from './GetPRChecks.js'; +export { GetPRComments } from './GetPRComments.js'; +export { GetPRDetails } from './GetPRDetails.js'; +export { GetPRDiff } from './GetPRDiff.js'; +export { PostPRComment } from './PostPRComment.js'; +export { ReplyToReviewComment } from './ReplyToReviewComment.js'; 
+export { UpdatePRComment } from './UpdatePRComment.js'; diff --git a/src/gadgets/pm/AddChecklist.ts b/src/gadgets/pm/AddChecklist.ts index 9d3c96be..7e19fb6b 100644 --- a/src/gadgets/pm/AddChecklist.ts +++ b/src/gadgets/pm/AddChecklist.ts @@ -1,5 +1,5 @@ import { createGadgetClass } from '../shared/gadgetFactory.js'; -import { type ChecklistItemInput, addChecklist } from './core/addChecklist.js'; +import { addChecklist, type ChecklistItemInput } from './core/addChecklist.js'; import { addChecklistDef } from './definitions.js'; export const AddChecklist = createGadgetClass(addChecklistDef, async (params) => { diff --git a/src/gadgets/pm/core/readWorkItem.ts b/src/gadgets/pm/core/readWorkItem.ts index 912f4928..674198da 100644 --- a/src/gadgets/pm/core/readWorkItem.ts +++ b/src/gadgets/pm/core/readWorkItem.ts @@ -1,5 +1,5 @@ -import { filterImageMedia, getPMProvider } from '../../../pm/index.js'; import type { Attachment, MediaReference } from '../../../pm/index.js'; +import { filterImageMedia, getPMProvider } from '../../../pm/index.js'; interface Label { name: string; diff --git a/src/gadgets/pm/index.ts b/src/gadgets/pm/index.ts index 5a0cc88d..aae6ed07 100644 --- a/src/gadgets/pm/index.ts +++ b/src/gadgets/pm/index.ts @@ -1,9 +1,9 @@ -export { ReadWorkItem } from './ReadWorkItem.js'; -export { PostComment } from './PostComment.js'; -export { UpdateWorkItem } from './UpdateWorkItem.js'; +export { AddChecklist } from './AddChecklist.js'; export { CreateWorkItem } from './CreateWorkItem.js'; +export { PMDeleteChecklistItem } from './DeleteChecklistItem.js'; export { ListWorkItems } from './ListWorkItems.js'; export { MoveWorkItem } from './MoveWorkItem.js'; -export { AddChecklist } from './AddChecklist.js'; +export { PostComment } from './PostComment.js'; +export { ReadWorkItem } from './ReadWorkItem.js'; export { PMUpdateChecklistItem } from './UpdateChecklistItem.js'; -export { PMDeleteChecklistItem } from './DeleteChecklistItem.js'; +export { UpdateWorkItem } 
from './UpdateWorkItem.js'; diff --git a/src/gadgets/sentry/index.ts b/src/gadgets/sentry/index.ts index 1106f329..86dcbfbb 100644 --- a/src/gadgets/sentry/index.ts +++ b/src/gadgets/sentry/index.ts @@ -1,3 +1,3 @@ -export { GetAlertingIssue } from './GetAlertingIssue.js'; export { GetAlertingEventDetail } from './GetAlertingEventDetail.js'; +export { GetAlertingIssue } from './GetAlertingIssue.js'; export { ListAlertingEvents } from './ListAlertingEvents.js'; diff --git a/src/gadgets/shared/diagnosticState.ts b/src/gadgets/shared/diagnosticState.ts index 8ed42269..8a8448a4 100644 --- a/src/gadgets/shared/diagnosticState.ts +++ b/src/gadgets/shared/diagnosticState.ts @@ -5,8 +5,8 @@ * allowing the trailing message to display consolidated diagnostic status. */ -import { runDiagnostics as runDiagnosticsCore, shouldRunDiagnostics } from './diagnostics.js'; import type { DiagnosticsResult } from './diagnostics.js'; +import { runDiagnostics as runDiagnosticsCore, shouldRunDiagnostics } from './diagnostics.js'; /** * Individual diagnostic error. diff --git a/src/gadgets/shared/index.ts b/src/gadgets/shared/index.ts index 0809dc76..293b4639 100644 --- a/src/gadgets/shared/index.ts +++ b/src/gadgets/shared/index.ts @@ -2,14 +2,14 @@ * Shared utilities and factory functions for gadgets, CLI commands, and tool manifests. 
*/ -export * from './types.js'; -export * from './matcher.js'; -export * from './pathValidation.js'; -export * from './diagnostics.js'; +export * from './cliCommandFactory.js'; export * from './diagnosticState.js'; +export * from './diagnostics.js'; +export * from './gadgetFactory.js'; +export * from './manifestGenerator.js'; +export * from './matcher.js'; export * from './onFileEditHook.js'; +export * from './pathValidation.js'; export * from './postEditChecks.js'; export * from './toolDefinition.js'; -export * from './gadgetFactory.js'; -export * from './cliCommandFactory.js'; -export * from './manifestGenerator.js'; +export * from './types.js'; diff --git a/src/gadgets/tmux.ts b/src/gadgets/tmux.ts index 891eb44d..a1ab93de 100644 --- a/src/gadgets/tmux.ts +++ b/src/gadgets/tmux.ts @@ -1,7 +1,7 @@ +export type { CompletedSessionNotice } from './tmux/index.js'; export { - Tmux, consumePendingSessionNotices, - validateGitCommand, resolveWorkingDirectory, + Tmux, + validateGitCommand, } from './tmux/index.js'; -export type { CompletedSessionNotice } from './tmux/index.js'; diff --git a/src/gadgets/tmux/TmuxGadget.ts b/src/gadgets/tmux/TmuxGadget.ts index 6142c6b3..a7ba131d 100644 --- a/src/gadgets/tmux/TmuxGadget.ts +++ b/src/gadgets/tmux/TmuxGadget.ts @@ -5,7 +5,6 @@ * All commands run as windows within a single control session. 
*/ import { Gadget, z } from 'llmist'; -import { type TmuxControlClient, getControlClient } from './TmuxControlClient.js'; import { DEFAULT_TIMEOUT_MS, DEFAULT_WAIT_MS, @@ -18,6 +17,7 @@ import { import { CommandFailedError } from './errors.js'; import { validateGitCommand } from './gitValidation.js'; import { addPendingNotice } from './sessionNotices.js'; +import { getControlClient, type TmuxControlClient } from './TmuxControlClient.js'; import { sanitizeSessionName, sleep } from './utils.js'; export class TmuxGadget extends Gadget({ diff --git a/src/gadgets/tmux/index.ts b/src/gadgets/tmux/index.ts index 79f29402..a0f74bc7 100644 --- a/src/gadgets/tmux/index.ts +++ b/src/gadgets/tmux/index.ts @@ -1,5 +1,5 @@ -export { TmuxGadget as Tmux } from './TmuxGadget.js'; -export { consumePendingSessionNotices } from './sessionNotices.js'; -export type { CompletedSessionNotice } from './sessionNotices.js'; export { validateGitCommand } from './gitValidation.js'; +export type { CompletedSessionNotice } from './sessionNotices.js'; +export { consumePendingSessionNotices } from './sessionNotices.js'; +export { TmuxGadget as Tmux } from './TmuxGadget.js'; export { resolveWorkingDirectory } from './utils.js'; diff --git a/src/gadgets/todo/TodoUpdateStatus.ts b/src/gadgets/todo/TodoUpdateStatus.ts index db407b8e..aa39d5d4 100644 --- a/src/gadgets/todo/TodoUpdateStatus.ts +++ b/src/gadgets/todo/TodoUpdateStatus.ts @@ -3,7 +3,7 @@ * Helps agents track progress through implementation tasks. 
*/ import { Gadget, z } from 'llmist'; -import { type TodoStatus, formatTodoList, loadTodos, saveTodos } from './storage.js'; +import { formatTodoList, loadTodos, saveTodos, type TodoStatus } from './storage.js'; export class TodoUpdateStatus extends Gadget({ name: 'TodoUpdateStatus', diff --git a/src/gadgets/todo/TodoUpsert.ts b/src/gadgets/todo/TodoUpsert.ts index d9b3fe19..28528b3f 100644 --- a/src/gadgets/todo/TodoUpsert.ts +++ b/src/gadgets/todo/TodoUpsert.ts @@ -3,7 +3,7 @@ * Helps agents plan and organize their implementation tasks. */ import { Gadget, z } from 'llmist'; -import { type Todo, formatTodoList, getNextId, loadTodos, saveTodos } from './storage.js'; +import { formatTodoList, getNextId, loadTodos, saveTodos, type Todo } from './storage.js'; interface TodoItem { id?: string; diff --git a/src/gadgets/todo/index.ts b/src/gadgets/todo/index.ts index 55cc36a3..001e052d 100644 --- a/src/gadgets/todo/index.ts +++ b/src/gadgets/todo/index.ts @@ -2,7 +2,8 @@ * Todo gadgets for agent task tracking. * Helps agents plan work and track progress through implementation tasks. */ + +export { initTodoSession } from './storage.js'; +export { TodoDelete } from './TodoDelete.js'; export { TodoUpdateStatus } from './TodoUpdateStatus.js'; export { TodoUpsert } from './TodoUpsert.js'; -export { TodoDelete } from './TodoDelete.js'; -export { initTodoSession } from './storage.js'; diff --git a/src/gadgets/todo/storage.ts b/src/gadgets/todo/storage.ts index 4ef69bc8..9bafcb44 100644 --- a/src/gadgets/todo/storage.ts +++ b/src/gadgets/todo/storage.ts @@ -88,7 +88,7 @@ export function saveTodos(todos: Todo[]): void { * Generates the next available todo ID (incrementing integer). 
*/ export function getNextId(todos: Todo[]): string { - const maxId = todos.reduce((max, t) => Math.max(max, Number.parseInt(t.id) || 0), 0); + const maxId = todos.reduce((max, t) => Math.max(max, Number.parseInt(t.id, 10) || 0), 0); return String(maxId + 1); } diff --git a/src/integrations/index.ts b/src/integrations/index.ts index f264ce17..ee45be4d 100644 --- a/src/integrations/index.ts +++ b/src/integrations/index.ts @@ -10,7 +10,7 @@ * - `integrationRegistry` singleton — the shared registry instance */ -export type { IntegrationModule, IntegrationWebhookEvent } from './types.js'; -export type { SCMIntegration } from './scm.js'; export type { AlertingIntegration } from './alerting.js'; export { IntegrationRegistry, integrationRegistry } from './registry.js'; +export type { SCMIntegration } from './scm.js'; +export type { IntegrationModule, IntegrationWebhookEvent } from './types.js'; diff --git a/src/pm/index.ts b/src/pm/index.ts index 13eab619..332e2894 100644 --- a/src/pm/index.ts +++ b/src/pm/index.ts @@ -1,34 +1,31 @@ +export { getPMProvider, getPMProviderOrNull, withPMProvider } from './context.js'; +// PMIntegration interface + registry +export type { PMIntegration, PMWebhookEvent } from './integration.js'; +export { hasPmIntegration } from './integration.js'; +export { JiraPMProvider } from './jira/adapter.js'; +export type { ProjectPMConfig } from './lifecycle.js'; +export { hasAutoLabel, PMLifecycleManager, resolveProjectPMConfig } from './lifecycle.js'; +export { + extractMarkdownImages, + filterImageMedia, + isImageMimeType, + MAX_IMAGE_SIZE_BYTES, + MAX_IMAGES_PER_WORK_ITEM, +} from './media.js'; +export { pmRegistry } from './registry.js'; +export { TrelloPMProvider } from './trello/adapter.js'; export type { + Attachment, + Checklist, + ChecklistItem, + CreateWorkItemConfig, + MediaReference, PMProvider, PMType, WorkItem, WorkItemComment, WorkItemLabel, - Checklist, - ChecklistItem, - Attachment, - CreateWorkItemConfig, - MediaReference, } from 
'./types.js'; - -export { - MAX_IMAGE_SIZE_BYTES, - MAX_IMAGES_PER_WORK_ITEM, - isImageMimeType, - filterImageMedia, - extractMarkdownImages, -} from './media.js'; - -export { withPMProvider, getPMProvider, getPMProviderOrNull } from './context.js'; -export { TrelloPMProvider } from './trello/adapter.js'; -export { JiraPMProvider } from './jira/adapter.js'; -export { PMLifecycleManager, resolveProjectPMConfig, hasAutoLabel } from './lifecycle.js'; -export type { ProjectPMConfig } from './lifecycle.js'; - -// PMIntegration interface + registry -export type { PMIntegration, PMWebhookEvent } from './integration.js'; -export { hasPmIntegration } from './integration.js'; -export { pmRegistry } from './registry.js'; export { processPMWebhook } from './webhook-handler.js'; import { integrationRegistry } from '../integrations/registry.js'; diff --git a/src/router/ackMessageGenerator.ts b/src/router/ackMessageGenerator.ts index bab9d2f8..0f757d26 100644 --- a/src/router/ackMessageGenerator.ts +++ b/src/router/ackMessageGenerator.ts @@ -231,7 +231,7 @@ export async function generateAckMessage( const result = await Promise.race([llmPromise, timeoutPromise]); - if (!result || !result.trim()) { + if (!result?.trim()) { return fallback; } diff --git a/src/router/adapters/github.ts b/src/router/adapters/github.ts index da717bd7..65bd914f 100644 --- a/src/router/adapters/github.ts +++ b/src/router/adapters/github.ts @@ -12,8 +12,8 @@ import { getProjectGitHubToken } from '../../config/projects.js'; import { findProjectByRepo } from '../../config/provider.js'; import { withGitHubToken } from '../../github/client.js'; import { - type PersonaIdentities, isCascadeBot, + type PersonaIdentities, resolvePersonaIdentities, } from '../../github/personas.js'; import { withPMCredentials, withPMProvider } from '../../pm/context.js'; @@ -30,7 +30,7 @@ import { postTrelloAck, resolveGitHubTokenForAckByAgent, } from '../acknowledgments.js'; -import { type RouterProjectConfig, loadProjectConfig } 
from '../config.js'; +import { loadProjectConfig, type RouterProjectConfig } from '../config.js'; import { extractPRNumber } from '../notifications.js'; import type { AckResult, ParsedWebhookEvent, RouterPlatformAdapter } from '../platform-adapter.js'; import { addEyesReactionToPR } from '../pre-actions.js'; diff --git a/src/router/adapters/jira.ts b/src/router/adapters/jira.ts index 91475d3b..0e8c0544 100644 --- a/src/router/adapters/jira.ts +++ b/src/router/adapters/jira.ts @@ -14,7 +14,7 @@ import { logger } from '../../utils/logging.js'; import { buildWorkItemRunsLink, getDashboardUrl } from '../../utils/runLink.js'; import { extractJiraContext, generateAckMessage } from '../ackMessageGenerator.js'; import { postJiraAck, resolveJiraBotAccountId } from '../acknowledgments.js'; -import { type RouterProjectConfig, loadProjectConfig } from '../config.js'; +import { loadProjectConfig, type RouterProjectConfig } from '../config.js'; import type { AckResult, ParsedWebhookEvent, RouterPlatformAdapter } from '../platform-adapter.js'; import { resolveJiraCredentials } from '../platformClients/index.js'; import type { CascadeJob, JiraJob } from '../queue.js'; diff --git a/src/router/adapters/sentry.ts b/src/router/adapters/sentry.ts index a49f2a65..78bcfab1 100644 --- a/src/router/adapters/sentry.ts +++ b/src/router/adapters/sentry.ts @@ -11,7 +11,7 @@ import type { SentryAugmentedPayload } from '../../sentry/types.js'; import type { TriggerRegistry } from '../../triggers/registry.js'; import type { TriggerContext, TriggerResult } from '../../types/index.js'; import { logger } from '../../utils/logging.js'; -import { type RouterProjectConfig, loadProjectConfig } from '../config.js'; +import { loadProjectConfig, type RouterProjectConfig } from '../config.js'; import type { AckResult, ParsedWebhookEvent, RouterPlatformAdapter } from '../platform-adapter.js'; import type { CascadeJob, SentryJob } from '../queue.js'; diff --git a/src/router/adapters/trello.ts 
b/src/router/adapters/trello.ts index b9c60d4a..88a4b8f7 100644 --- a/src/router/adapters/trello.ts +++ b/src/router/adapters/trello.ts @@ -14,7 +14,7 @@ import { logger } from '../../utils/logging.js'; import { buildWorkItemRunsLink, getDashboardUrl } from '../../utils/runLink.js'; import { extractTrelloContext, generateAckMessage } from '../ackMessageGenerator.js'; import { postTrelloAck } from '../acknowledgments.js'; -import { type RouterProjectConfig, loadProjectConfig } from '../config.js'; +import { loadProjectConfig, type RouterProjectConfig } from '../config.js'; import type { AckResult, ParsedWebhookEvent, RouterPlatformAdapter } from '../platform-adapter.js'; import { resolveTrelloCredentials } from '../platformClients/index.js'; import type { CascadeJob, TrelloJob } from '../queue.js'; diff --git a/src/router/container-manager.ts b/src/router/container-manager.ts index d4ea8ba1..029a0be0 100644 --- a/src/router/container-manager.ts +++ b/src/router/container-manager.ts @@ -40,19 +40,19 @@ export { getActiveWorkers, } from './active-workers.js'; export { + scanAndCleanupOrphans, startOrphanCleanup, stopOrphanCleanup, - scanAndCleanupOrphans, } from './orphan-cleanup.js'; -export { - buildWorkerEnv, - extractProjectIdFromJob, -} from './worker-env.js'; export { getSnapshot, invalidateSnapshot, registerSnapshot, } from './snapshot-manager.js'; +export { + buildWorkerEnv, + extractProjectIdFromJob, +} from './worker-env.js'; const docker = new Docker(); diff --git a/src/router/platformClients/index.ts b/src/router/platformClients/index.ts index e0c5efe4..e9b4f9b9 100644 --- a/src/router/platformClients/index.ts +++ b/src/router/platformClients/index.ts @@ -8,12 +8,12 @@ * directory path. 
*/ -export type { JiraCredentialsWithAuth, PlatformCommentClient, TrelloCredentials } from './types.js'; export { resolveGitHubHeaders, resolveJiraCredentials, resolveTrelloCredentials, } from './credentials.js'; -export { TrelloPlatformClient } from './trello.js'; export { GitHubPlatformClient } from './github.js'; -export { JiraPlatformClient, _resetJiraCloudIdCache } from './jira.js'; +export { _resetJiraCloudIdCache, JiraPlatformClient } from './jira.js'; +export { TrelloPlatformClient } from './trello.js'; +export type { JiraCredentialsWithAuth, PlatformCommentClient, TrelloCredentials } from './types.js'; diff --git a/src/router/platformClients/types.ts b/src/router/platformClients/types.ts index 50d91793..bb34cc3d 100644 --- a/src/router/platformClients/types.ts +++ b/src/router/platformClients/types.ts @@ -3,6 +3,7 @@ */ import type { JiraCredentials } from '../../jira/types.js'; + export type { TrelloCredentials } from '../../trello/types.js'; /** Extends JiraCredentials with a pre-computed Base64 Basic auth header value. 
*/ diff --git a/src/router/reactions.ts b/src/router/reactions.ts index 7ea086bf..010f844d 100644 --- a/src/router/reactions.ts +++ b/src/router/reactions.ts @@ -9,14 +9,14 @@ */ import { getProjectGitHubToken } from '../config/projects.js'; -import { type PersonaIdentities, isCascadeBot } from '../github/personas.js'; +import { isCascadeBot, type PersonaIdentities } from '../github/personas.js'; import { trelloClient, withTrelloCredentials } from '../trello/client.js'; import type { ProjectConfig } from '../types/index.js'; import { logger } from '../utils/logging.js'; import { parseRepoFullName } from '../utils/repo.js'; import { - JiraPlatformClient, _resetJiraCloudIdCache, + JiraPlatformClient, resolveGitHubHeaders, resolveTrelloCredentials, } from './platformClients/index.js'; diff --git a/src/sentry/alerting-integration.ts b/src/sentry/alerting-integration.ts index ba2c5bb8..43cfc4d0 100644 --- a/src/sentry/alerting-integration.ts +++ b/src/sentry/alerting-integration.ts @@ -15,9 +15,9 @@ import { getIntegrationCredential } from '../config/provider.js'; import type { AlertingIntegration } from '../integrations/alerting.js'; import { - type SentryIntegrationConfig, getSentryIntegrationConfig, hasAlertingIntegration, + type SentryIntegrationConfig, } from './integration.js'; export class SentryAlertingIntegration implements AlertingIntegration { diff --git a/src/triggers/github/index.ts b/src/triggers/github/index.ts index 47060e2d..475d1dcd 100644 --- a/src/triggers/github/index.ts +++ b/src/triggers/github/index.ts @@ -6,8 +6,8 @@ export { PRMergedTrigger } from './pr-merged.js'; export { PROpenedTrigger } from './pr-opened.js'; export { PRReadyToMergeTrigger } from './pr-ready-to-merge.js'; export { PRReviewSubmittedTrigger } from './pr-review-submitted.js'; -export { ReviewRequestedTrigger } from './review-requested.js'; -export { processGitHubWebhook } from './webhook-handler.js'; export { registerGitHubTriggers } from './register.js'; +export { 
ReviewRequestedTrigger } from './review-requested.js'; export * from './types.js'; export * from './utils.js'; +export { processGitHubWebhook } from './webhook-handler.js'; diff --git a/src/triggers/github/pr-ready-to-merge.ts b/src/triggers/github/pr-ready-to-merge.ts index 0be98982..8fe74741 100644 --- a/src/triggers/github/pr-ready-to-merge.ts +++ b/src/triggers/github/pr-ready-to-merge.ts @@ -1,7 +1,7 @@ import { githubClient } from '../../github/client.js'; import { getPMProvider } from '../../pm/context.js'; -import { hasAutoLabel, resolveProjectPMConfig } from '../../pm/lifecycle.js'; import type { ProjectPMConfig } from '../../pm/lifecycle.js'; +import { hasAutoLabel, resolveProjectPMConfig } from '../../pm/lifecycle.js'; import type { PMProvider } from '../../pm/types.js'; import type { TriggerContext, TriggerHandler, TriggerResult } from '../../types/index.js'; import { logger } from '../../utils/logging.js'; diff --git a/src/triggers/index.ts b/src/triggers/index.ts index a9a45e9c..89ab4418 100644 --- a/src/triggers/index.ts +++ b/src/triggers/index.ts @@ -1,12 +1,12 @@ -export { type TriggerRegistry, createTriggerRegistry } from './registry.js'; +export { registerBuiltInTriggers } from './builtins.js'; +export { processGitHubWebhook } from './github/webhook-handler.js'; +export { processJiraWebhook } from './jira/webhook-handler.js'; +export { createTriggerRegistry, type TriggerRegistry } from './registry.js'; +export { processTrelloWebhook } from './trello/webhook-handler.js'; export type { + TrelloWebhookPayload, TriggerContext, TriggerHandler, TriggerResult, - TrelloWebhookPayload, } from './types.js'; export { isTrelloWebhookPayload } from './types.js'; -export { processTrelloWebhook } from './trello/webhook-handler.js'; -export { processGitHubWebhook } from './github/webhook-handler.js'; -export { processJiraWebhook } from './jira/webhook-handler.js'; -export { registerBuiltInTriggers } from './builtins.js'; diff --git a/src/triggers/jira/index.ts 
b/src/triggers/jira/index.ts index f577c40a..b5c39b0b 100644 --- a/src/triggers/jira/index.ts +++ b/src/triggers/jira/index.ts @@ -5,7 +5,7 @@ */ export { JiraCommentMentionTrigger } from './comment-mention.js'; -export { JiraStatusChangedTrigger } from './status-changed.js'; export { JiraReadyToProcessLabelTrigger } from './label-added.js'; -export { processJiraWebhook } from './webhook-handler.js'; export { registerJiraTriggers } from './register.js'; +export { JiraStatusChangedTrigger } from './status-changed.js'; +export { processJiraWebhook } from './webhook-handler.js'; diff --git a/src/triggers/shared/agent-execution.ts b/src/triggers/shared/agent-execution.ts index e347d0f9..156b66ef 100644 --- a/src/triggers/shared/agent-execution.ts +++ b/src/triggers/shared/agent-execution.ts @@ -6,9 +6,9 @@ import { updateRunPRNumber } from '../../db/repositories/runsRepository.js'; import { getJiraConfig, getTrelloConfig } from '../../pm/config.js'; import { getPMProvider } from '../../pm/context.js'; import { - PMLifecycleManager, createPMProvider, hasAutoLabel, + PMLifecycleManager, resolveProjectPMConfig, } from '../../pm/index.js'; import { checkTriggerEnabled } from '../../triggers/shared/trigger-check.js'; @@ -22,8 +22,8 @@ import { checkBudgetExceeded } from './budget.js'; import { triggerDebugAnalysis } from './debug-runner.js'; import { shouldTriggerDebug } from './debug-trigger.js'; import { - type ValidationResult, formatValidationErrors, + type ValidationResult, validateIntegrations, } from './integration-validation.js'; diff --git a/src/triggers/shared/agent-pm-poster.ts b/src/triggers/shared/agent-pm-poster.ts index 1e8c244e..cdecca9f 100644 --- a/src/triggers/shared/agent-pm-poster.ts +++ b/src/triggers/shared/agent-pm-poster.ts @@ -221,7 +221,7 @@ export async function postAgentOutputToPM( output: string, progressCommentId?: string, ): Promise { - if (!output || !output.trim()) { + if (!output?.trim()) { logger.warn('postAgentOutputToPM skipped: empty 
output', { workItemId, agentType }); return; } diff --git a/src/triggers/shared/trigger-check.ts b/src/triggers/shared/trigger-check.ts index 4f0914d0..38a29391 100644 --- a/src/triggers/shared/trigger-check.ts +++ b/src/triggers/shared/trigger-check.ts @@ -41,7 +41,7 @@ export async function checkTriggerEnabledWithParams( handlerName: string, ): Promise<{ enabled: boolean; parameters: Record }> { const config = await getResolvedTriggerConfig(projectId, agentType, triggerEvent); - if (!config || !config.enabled) { + if (!config?.enabled) { logger.info('Trigger disabled by config, skipping', { handler: handlerName, agentType, diff --git a/src/triggers/trello/index.ts b/src/triggers/trello/index.ts index 4b285da0..04806ebb 100644 --- a/src/triggers/trello/index.ts +++ b/src/triggers/trello/index.ts @@ -4,12 +4,12 @@ * For trigger registration use `registerTrelloTriggers` from `./register.js`. */ +export { TrelloCommentMentionTrigger } from './comment-mention.js'; +export { ReadyToProcessLabelTrigger } from './label-added.js'; +export { registerTrelloTriggers } from './register.js'; export { TrelloStatusChangedPlanningTrigger, TrelloStatusChangedSplittingTrigger, TrelloStatusChangedTodoTrigger, } from './status-changed.js'; -export { TrelloCommentMentionTrigger } from './comment-mention.js'; -export { ReadyToProcessLabelTrigger } from './label-added.js'; export { processTrelloWebhook } from './webhook-handler.js'; -export { registerTrelloTriggers } from './register.js'; diff --git a/src/triggers/trello/status-changed.ts b/src/triggers/trello/status-changed.ts index 2d837e69..9161db7c 100644 --- a/src/triggers/trello/status-changed.ts +++ b/src/triggers/trello/status-changed.ts @@ -3,7 +3,7 @@ import { invalidateSnapshot } from '../../router/snapshot-manager.js'; import { logger } from '../../utils/logging.js'; import { checkTriggerEnabled } from '../shared/trigger-check.js'; import type { TriggerContext, TriggerHandler, TriggerResult } from '../types.js'; -import { 
type TrelloWebhookPayload, isTrelloWebhookPayload } from './types.js'; +import { isTrelloWebhookPayload, type TrelloWebhookPayload } from './types.js'; // ============================================================================ // Status Changed Trigger Factory (Trello) diff --git a/src/triggers/types.ts b/src/triggers/types.ts index 570df297..f31e37f8 100644 --- a/src/triggers/types.ts +++ b/src/triggers/types.ts @@ -1,7 +1,6 @@ import type { TriggerContext, TriggerHandler, TriggerResult } from '../types/index.js'; -export type { TriggerContext, TriggerHandler, TriggerResult }; - // Re-export Trello types from their canonical location export type { TrelloWebhookPayload } from './trello/types.js'; export { isTrelloWebhookPayload } from './trello/types.js'; +export type { TriggerContext, TriggerHandler, TriggerResult }; diff --git a/src/utils/fileLogger.ts b/src/utils/fileLogger.ts index be270d9d..718ae4cb 100644 --- a/src/utils/fileLogger.ts +++ b/src/utils/fileLogger.ts @@ -3,7 +3,7 @@ import path from 'node:path'; import { PassThrough } from 'node:stream'; import archiver from 'archiver'; -import { type LLMCallLogger, createLLMCallLogger } from './llmLogging.js'; +import { createLLMCallLogger, type LLMCallLogger } from './llmLogging.js'; import { getWorkspaceDir } from './repo.js'; export interface FileLogger { diff --git a/src/utils/index.ts b/src/utils/index.ts index de5a5b91..32901c79 100644 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -1,16 +1,16 @@ -export { logger, setLogLevel, getLogLevel } from './logging.js'; +export { cleanupLogFile, createFileLogger, type FileLogger } from './fileLogger.js'; export { - startWatchdog, clearWatchdog, - setWatchdogCleanup, clearWatchdogCleanup, + setWatchdogCleanup, + startWatchdog, } from './lifecycle.js'; +export { getLogLevel, logger, setLogLevel } from './logging.js'; export { - createTempDir, - cloneRepo, cleanupTempDir, - runCommand, + cloneRepo, + createTempDir, getWorkspaceDir, parseRepoFullName, + 
runCommand, } from './repo.js'; -export { createFileLogger, cleanupLogFile, type FileLogger } from './fileLogger.js'; diff --git a/src/utils/llmLogging.ts b/src/utils/llmLogging.ts index 69284559..01647069 100644 --- a/src/utils/llmLogging.ts +++ b/src/utils/llmLogging.ts @@ -1,7 +1,7 @@ import fs from 'node:fs'; import path from 'node:path'; -import { type LLMMessage, extractMessageText } from 'llmist'; +import { extractMessageText, type LLMMessage } from 'llmist'; /** * Formats LLM messages as plain text for debugging. diff --git a/src/utils/logging.ts b/src/utils/logging.ts index cacf26f7..a8b53b35 100644 --- a/src/utils/logging.ts +++ b/src/utils/logging.ts @@ -1,5 +1,5 @@ -import { createLogger } from 'llmist'; import type { ILogObj, Logger } from 'llmist'; +import { createLogger } from 'llmist'; export const LOG_LEVELS: Record = { silly: 0, diff --git a/src/webhook/webhookHandlers.ts b/src/webhook/webhookHandlers.ts index c839f1c6..b5e9f00a 100644 --- a/src/webhook/webhookHandlers.ts +++ b/src/webhook/webhookHandlers.ts @@ -20,13 +20,13 @@ import { handleProcessingError, logSuccessfulWebhook } from './webhookLogging.js // Re-exports // --------------------------------------------------------------------------- -export type { ParseResult, WebhookHandlerConfig, WebhookLogOverrides } from './webhookTypes.js'; export { parseGitHubPayload, parseJiraPayload, parseSentryPayload, parseTrelloPayload, } from './webhookParsers.js'; +export type { ParseResult, WebhookHandlerConfig, WebhookLogOverrides } from './webhookTypes.js'; // --------------------------------------------------------------------------- // Types (local import for factory use) diff --git a/src/worker-entry.ts b/src/worker-entry.ts index 365a74a5..2072559e 100644 --- a/src/worker-entry.ts +++ b/src/worker-entry.ts @@ -21,11 +21,11 @@ import { loadConfig } from './config/provider.js'; import { getDb } from './db/client.js'; import { captureException, flush, setTag } from './sentry.js'; import { - type 
TriggerRegistry, createTriggerRegistry, processGitHubWebhook, processJiraWebhook, registerBuiltInTriggers, + type TriggerRegistry, } from './triggers/index.js'; import { processSentryWebhook } from './triggers/sentry/webhook-handler.js'; import { processTrelloWebhook } from './triggers/trello/webhook-handler.js'; diff --git a/tests/docker/claude-code-auth/verify-auth.ts b/tests/docker/claude-code-auth/verify-auth.ts index 1cea3e81..91c22dcf 100644 --- a/tests/docker/claude-code-auth/verify-auth.ts +++ b/tests/docker/claude-code-auth/verify-auth.ts @@ -7,7 +7,7 @@ import { join } from 'node:path'; * to verify that subscription auth works in a containerized environment. * The SDK picks up the token automatically from the environment. */ -import { type SDKResultMessage, query } from '@anthropic-ai/claude-agent-sdk'; +import { query, type SDKResultMessage } from '@anthropic-ai/claude-agent-sdk'; const token = process.env.CLAUDE_CODE_OAUTH_TOKEN; if (!token) { diff --git a/tests/integration/db/agentDefinitionsRepository.test.ts b/tests/integration/db/agentDefinitionsRepository.test.ts index a55ea72c..9f201dbb 100644 --- a/tests/integration/db/agentDefinitionsRepository.test.ts +++ b/tests/integration/db/agentDefinitionsRepository.test.ts @@ -1,6 +1,6 @@ import { beforeEach, describe, expect, it } from 'vitest'; -import { AgentDefinitionSchema } from '../../../src/agents/definitions/schema.js'; import type { AgentDefinition } from '../../../src/agents/definitions/schema.js'; +import { AgentDefinitionSchema } from '../../../src/agents/definitions/schema.js'; import { deleteAgentDefinition, getAgentDefinition, diff --git a/tests/integration/db/agentTriggerConfigsRepository.test.ts b/tests/integration/db/agentTriggerConfigsRepository.test.ts index c8bae7c0..3f746d7d 100644 --- a/tests/integration/db/agentTriggerConfigsRepository.test.ts +++ b/tests/integration/db/agentTriggerConfigsRepository.test.ts @@ -444,7 +444,7 @@ describe('agentTriggerConfigsRepository 
(integration)', () => { flag: true, }; - const config = await upsertTriggerConfig({ + const _config = await upsertTriggerConfig({ projectId: 'test-project', agentType: 'review', triggerEvent: 'scm:check-suite-success', diff --git a/tests/integration/db/projectsRepository.test.ts b/tests/integration/db/projectsRepository.test.ts index 63789733..37159dc0 100644 --- a/tests/integration/db/projectsRepository.test.ts +++ b/tests/integration/db/projectsRepository.test.ts @@ -511,7 +511,7 @@ describe('projectsRepository (integration)', () => { describe('optional fields round-trip', () => { it('all nullable fields read back correctly when set', async () => { - const project = await createProject('test-org', { + const _project = await createProject('test-org', { id: 'all-fields-project', name: 'All Fields Project', repo: 'owner/all-fields', diff --git a/tests/integration/github-personas.test.ts b/tests/integration/github-personas.test.ts index c4cf9639..da39f07d 100644 --- a/tests/integration/github-personas.test.ts +++ b/tests/integration/github-personas.test.ts @@ -12,10 +12,10 @@ import { writeProjectCredential, } from '../../src/db/repositories/credentialsRepository.js'; import { - type PersonaIdentities, getPersonaForAgentType, getPersonaForLogin, isCascadeBot, + type PersonaIdentities, } from '../../src/github/personas.js'; import { PRReviewSubmittedTrigger } from '../../src/triggers/github/pr-review-submitted.js'; import { ReviewRequestedTrigger } from '../../src/triggers/github/review-requested.js'; diff --git a/tests/integration/helpers/seed.ts b/tests/integration/helpers/seed.ts index 8d92cb6f..47101e6b 100644 --- a/tests/integration/helpers/seed.ts +++ b/tests/integration/helpers/seed.ts @@ -64,12 +64,7 @@ export async function seedProject( * Seeds a project-scoped credential via the repository. 
*/ export async function seedCredential( - overrides: { - projectId?: string; - name?: string; - envVarKey?: string; - value?: string; - } = {}, + overrides: { projectId?: string; name?: string; envVarKey?: string; value?: string } = {}, ) { const projectId = overrides.projectId ?? 'test-project'; const envVarKey = overrides.envVarKey ?? 'TEST_KEY'; @@ -114,10 +109,7 @@ export async function seedIntegrationCredential(overrides: { role?: string; credentialId: number; }) { - // For backward compatibility: look up the integration and write to project_credentials - const { removeIntegrationCredential } = await import( - '../../../src/db/repositories/integrationsRepository.js' - ); + // For backward compatibility. // The credentialId is no longer meaningful after legacy table removal. // This function is preserved to avoid breaking existing test seeds that call it. // Integration credentials are now stored in project_credentials by envVarKey. @@ -264,11 +256,7 @@ export async function seedWebhookLog( * Seeds a prompt partial row. */ export async function seedPromptPartial( - overrides: { - orgId?: string | null; - name?: string; - content?: string; - } = {}, + overrides: { orgId?: string | null; name?: string; content?: string } = {}, ) { const db = getDb(); const [row] = await db @@ -285,11 +273,7 @@ export async function seedPromptPartial( /** * Seeds a session for a user. 
*/ -export async function seedSession(overrides: { - userId: string; - token?: string; - expiresAt?: Date; -}) { +export async function seedSession(overrides: { userId: string; token?: string; expiresAt?: Date }) { const db = getDb(); const futureDate = new Date(); futureDate.setDate(futureDate.getDate() + 30); diff --git a/tests/unit/agents/capabilities/resolver.test.ts b/tests/unit/agents/capabilities/resolver.test.ts index 2946378b..d03d8932 100644 --- a/tests/unit/agents/capabilities/resolver.test.ts +++ b/tests/unit/agents/capabilities/resolver.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; /** Create a mock class with the given name so constructor.name works in assertions */ function mockClass(name: string) { diff --git a/tests/unit/agents/definitions/contextSteps.test.ts b/tests/unit/agents/definitions/contextSteps.test.ts index 86602498..c14d2566 100644 --- a/tests/unit/agents/definitions/contextSteps.test.ts +++ b/tests/unit/agents/definitions/contextSteps.test.ts @@ -32,11 +32,11 @@ vi.mock('../../../../src/gadgets/pm/core/readWorkItem.js', () => ({ readWorkItemWithMedia: vi.fn(), })); +import type { FetchContextParams } from '../../../../src/agents/definitions/contextSteps.js'; import { fetchWorkItemStep, prepopulateTodosStep, } from '../../../../src/agents/definitions/contextSteps.js'; -import type { FetchContextParams } from '../../../../src/agents/definitions/contextSteps.js'; import { readWorkItemWithMedia } from '../../../../src/gadgets/pm/core/readWorkItem.js'; import { initTodoSession, saveTodos } from '../../../../src/gadgets/todo/storage.js'; import { getPMProviderOrNull } from '../../../../src/pm/index.js'; diff --git a/tests/unit/agents/definitions/pipelineSnapshot.test.ts b/tests/unit/agents/definitions/pipelineSnapshot.test.ts index cf31b457..06f8aae8 100644 --- a/tests/unit/agents/definitions/pipelineSnapshot.test.ts +++ 
b/tests/unit/agents/definitions/pipelineSnapshot.test.ts @@ -8,8 +8,8 @@ vi.mock('../../../../src/gadgets/pm/core/readWorkItem.js', () => ({ readWorkItem: vi.fn(), })); -import { fetchPipelineSnapshotStep } from '../../../../src/agents/definitions/contextSteps.js'; import type { FetchContextParams } from '../../../../src/agents/definitions/contextSteps.js'; +import { fetchPipelineSnapshotStep } from '../../../../src/agents/definitions/contextSteps.js'; import { readWorkItem } from '../../../../src/gadgets/pm/core/readWorkItem.js'; import { getPMProviderOrNull } from '../../../../src/pm/index.js'; import type { AgentInput, ProjectConfig } from '../../../../src/types/index.js'; diff --git a/tests/unit/agents/hooks.test.ts b/tests/unit/agents/hooks.test.ts index 528b2231..8be48094 100644 --- a/tests/unit/agents/hooks.test.ts +++ b/tests/unit/agents/hooks.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/utils/llmMetrics.js', () => ({ calculateCost: vi.fn().mockReturnValue(0.005), diff --git a/tests/unit/agents/registry.test.ts b/tests/unit/agents/registry.test.ts index 9f3068d2..f477a6e7 100644 --- a/tests/unit/agents/registry.test.ts +++ b/tests/unit/agents/registry.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // Must mock engine modules before importing the registry vi.mock('../../../src/backends/adapter.js', () => ({ diff --git a/tests/unit/agents/shared/gadgets.test.ts b/tests/unit/agents/shared/gadgets.test.ts index 1c42bfe9..ee923be9 100644 --- a/tests/unit/agents/shared/gadgets.test.ts +++ b/tests/unit/agents/shared/gadgets.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; /** Create a mock class with the given name so constructor.name works in assertions */ function 
mockClass(name: string) { diff --git a/tests/unit/agents/shared/repository.test.ts b/tests/unit/agents/shared/repository.test.ts index 2ef2c95e..de02283c 100644 --- a/tests/unit/agents/shared/repository.test.ts +++ b/tests/unit/agents/shared/repository.test.ts @@ -69,12 +69,10 @@ beforeEach(() => { mockReaddirSync.mockReturnValue([]); mockWarmTypeScriptCache.mockResolvedValue(null); mockGetWorkspaceDir.mockReturnValue('/workspace'); - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.CASCADE_SNAPSHOT_REUSE; }); afterEach(() => { - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.CASCADE_SNAPSHOT_REUSE; }); @@ -471,7 +469,6 @@ describe('setupRepository — snapshot-reuse path', () => { }); it('does not enter snapshot path when CASCADE_SNAPSHOT_REUSE is absent', async () => { - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.CASCADE_SNAPSHOT_REUSE; const project = makeProject(); const log = makeLog(); diff --git a/tests/unit/agents/shared/runTracking.test.ts b/tests/unit/agents/shared/runTracking.test.ts index ef6fd9c1..8e0f5082 100644 --- a/tests/unit/agents/shared/runTracking.test.ts +++ b/tests/unit/agents/shared/runTracking.test.ts @@ -26,8 +26,8 @@ vi.mock('node:fs', () => ({ import fs from 'node:fs'; import { - type RunTrackingInput, finalizeEngineRun, + type RunTrackingInput, tryCompleteRun, tryCreateRun, tryStoreRunLogs, @@ -73,7 +73,6 @@ describe('tryCreateRun', () => { }); afterEach(() => { - // biome-ignore lint/performance/noDelete: Clean up test environment delete process.env.JOB_ID; }); @@ -102,7 +101,6 @@ describe('tryCreateRun', () => { }); it('does not call updateRunJobId when JOB_ID env var is not set', async () => { - // biome-ignore lint/performance/noDelete: Clean environment before test delete process.env.JOB_ID; mockUpdateRunJobId.mockClear(); diff --git 
a/tests/unit/agents/shared/syntheticCalls.test.ts b/tests/unit/agents/shared/syntheticCalls.test.ts index 98170ac9..28d3b1c0 100644 --- a/tests/unit/agents/shared/syntheticCalls.test.ts +++ b/tests/unit/agents/shared/syntheticCalls.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../../src/agents/utils/tracking.js', () => ({ recordSyntheticInvocationId: vi.fn(), diff --git a/tests/unit/agents/utils/agentLoop.test.ts b/tests/unit/agents/utils/agentLoop.test.ts index 0fddd69e..657afee0 100644 --- a/tests/unit/agents/utils/agentLoop.test.ts +++ b/tests/unit/agents/utils/agentLoop.test.ts @@ -48,7 +48,7 @@ import { const mockAddBreadcrumb = vi.mocked(addBreadcrumb); const mockConsumePendingSessionNotices = vi.mocked(consumePendingSessionNotices); const mockDisplayGadgetCall = vi.mocked(displayGadgetCall); -const mockDisplayGadgetResult = vi.mocked(displayGadgetResult); +const _mockDisplayGadgetResult = vi.mocked(displayGadgetResult); const mockDisplayLLMText = vi.mocked(displayLLMText); const mockWaitForEnter = vi.mocked(waitForEnter); const mockConsumeLoopAction = vi.mocked(consumeLoopAction); diff --git a/tests/unit/agents/utils/logging.test.ts b/tests/unit/agents/utils/logging.test.ts index 2f7bbd9d..8a8d49cb 100644 --- a/tests/unit/agents/utils/logging.test.ts +++ b/tests/unit/agents/utils/logging.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../../src/utils/logging.js', () => ({ logger: { diff --git a/tests/unit/agents/utils/setup.test.ts b/tests/unit/agents/utils/setup.test.ts index 677adb32..fb1d3501 100644 --- a/tests/unit/agents/utils/setup.test.ts +++ b/tests/unit/agents/utils/setup.test.ts @@ -11,9 +11,9 @@ vi.mock('../../../../src/utils/repo.js', () => ({ import { existsSync, readFileSync } from 'node:fs'; import { - LOG_LEVELS, getLogLevel, 
installDependencies, + LOG_LEVELS, readContextFiles, warmTypeScriptCache, } from '../../../../src/agents/utils/setup.js'; diff --git a/tests/unit/agents/utils/tracking.test.ts b/tests/unit/agents/utils/tracking.test.ts index ac06735a..aebb1a25 100644 --- a/tests/unit/agents/utils/tracking.test.ts +++ b/tests/unit/agents/utils/tracking.test.ts @@ -1,11 +1,11 @@ import { describe, expect, it } from 'vitest'; import { - LOOP_THRESHOLDS, checkForLoopAndAdvance, consumeLoopAction, consumeLoopWarning, createTrackingContext, + LOOP_THRESHOLDS, recordGadgetCallForLoop, } from '../../../../src/agents/utils/tracking.js'; diff --git a/tests/unit/api/access-control.test.ts b/tests/unit/api/access-control.test.ts index 93aafa99..15dc4832 100644 --- a/tests/unit/api/access-control.test.ts +++ b/tests/unit/api/access-control.test.ts @@ -97,11 +97,10 @@ import { authRouter } from '../../../src/api/routers/auth.js'; import { organizationRouter } from '../../../src/api/routers/organization.js'; import { projectsRouter } from '../../../src/api/routers/projects.js'; import { - type TRPCContext, - type TRPCUser, adminProcedure, protectedProcedure, router, + type TRPCContext, } from '../../../src/api/trpc.js'; import { createMockUser } from '../../helpers/factories.js'; diff --git a/tests/unit/api/auth/logout.test.ts b/tests/unit/api/auth/logout.test.ts index 12ee0ae1..be9a6769 100644 --- a/tests/unit/api/auth/logout.test.ts +++ b/tests/unit/api/auth/logout.test.ts @@ -1,5 +1,5 @@ import { Hono } from 'hono'; -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; const mockDeleteSession = vi.fn(); diff --git a/tests/unit/api/auth/rateLimiter.test.ts b/tests/unit/api/auth/rateLimiter.test.ts index 9fbc8731..e2e6b646 100644 --- a/tests/unit/api/auth/rateLimiter.test.ts +++ b/tests/unit/api/auth/rateLimiter.test.ts @@ -1,12 +1,12 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import { - MAX_ATTEMPTS, 
- WINDOW_MS, _resetForTesting, _runCleanup, checkRateLimit, + MAX_ATTEMPTS, rateLimitStore, recordSuccessfulLogin, + WINDOW_MS, } from '../../../../src/api/auth/rateLimiter.js'; describe('rateLimiter', () => { diff --git a/tests/unit/api/auth/session.test.ts b/tests/unit/api/auth/session.test.ts index 33abe190..1ba066b8 100644 --- a/tests/unit/api/auth/session.test.ts +++ b/tests/unit/api/auth/session.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; const mockGetSessionByToken = vi.fn(); const mockGetUserById = vi.fn(); diff --git a/tests/unit/api/routers/_shared/triggerTypes.test.ts b/tests/unit/api/routers/_shared/triggerTypes.test.ts index 8c704371..f2914175 100644 --- a/tests/unit/api/routers/_shared/triggerTypes.test.ts +++ b/tests/unit/api/routers/_shared/triggerTypes.test.ts @@ -130,7 +130,7 @@ describe('triggerTypes', () => { }); it('all triggers have required KnownTriggerEvent fields', () => { - for (const [category, triggers] of Object.entries(TRIGGER_REGISTRY)) { + for (const [_category, triggers] of Object.entries(TRIGGER_REGISTRY)) { for (const trigger of triggers) { expect(trigger.event).toBeTruthy(); expect(trigger.label).toBeTruthy(); diff --git a/tests/unit/api/routers/agentConfigs.test.ts b/tests/unit/api/routers/agentConfigs.test.ts index 6ebdadb0..561f8a6e 100644 --- a/tests/unit/api/routers/agentConfigs.test.ts +++ b/tests/unit/api/routers/agentConfigs.test.ts @@ -65,7 +65,7 @@ vi.mock('../../../../src/agents/definitions/index.js', () => ({ })); // Mock getDb for ownership checks -const { mockDbSelect, mockDbFrom, mockDbWhere, configureOwnership } = setupOwnershipCheckMock(); +const { mockDbSelect, mockDbFrom, mockDbWhere } = setupOwnershipCheckMock(); vi.mock('../../../../src/db/client.js', () => ({ getDb: () => ({ diff --git a/tests/unit/api/routers/agentTriggerConfigs.test.ts b/tests/unit/api/routers/agentTriggerConfigs.test.ts index 92c92082..b5d3ab9b 
100644 --- a/tests/unit/api/routers/agentTriggerConfigs.test.ts +++ b/tests/unit/api/routers/agentTriggerConfigs.test.ts @@ -278,7 +278,7 @@ describe('agentTriggerConfigsRouter', () => { // ===================================================================== describe('bulkUpsert', () => { it('bulk upserts multiple trigger configs', async () => { - const configs = [ + const _configs = [ createMockConfig(), createMockConfig({ id: 2, triggerEvent: 'pm:label-added' }), ]; diff --git a/tests/unit/api/routers/organization.test.ts b/tests/unit/api/routers/organization.test.ts index 24587169..69a4f0ca 100644 --- a/tests/unit/api/routers/organization.test.ts +++ b/tests/unit/api/routers/organization.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockSuperAdmin, createMockUser } from '../../../helpers/factories.js'; import { createCallerFor, expectTRPCError } from '../../../helpers/trpcTestHarness.js'; diff --git a/tests/unit/api/routers/projects.test.ts b/tests/unit/api/routers/projects.test.ts index cfd1b1a3..732185e3 100644 --- a/tests/unit/api/routers/projects.test.ts +++ b/tests/unit/api/routers/projects.test.ts @@ -70,7 +70,7 @@ vi.mock('../../../../src/sentry.js', () => ({ })); // Mock getDb for ownership checks -const { mockDbSelect, mockDbFrom, mockDbWhere, configureOwnership } = setupOwnershipCheckMock(); +const { mockDbSelect, mockDbFrom, mockDbWhere } = setupOwnershipCheckMock(); vi.mock('../../../../src/db/client.js', () => ({ getDb: () => ({ diff --git a/tests/unit/api/routers/prompts.test.ts b/tests/unit/api/routers/prompts.test.ts index 58a4b076..f4ca89b6 100644 --- a/tests/unit/api/routers/prompts.test.ts +++ b/tests/unit/api/routers/prompts.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockSuperAdmin, createMockUser } from 
'../../../helpers/factories.js'; import { createCallerFor, expectTRPCError } from '../../../helpers/trpcTestHarness.js'; diff --git a/tests/unit/api/routers/runs.test.ts b/tests/unit/api/routers/runs.test.ts index eb1e3573..c249445d 100644 --- a/tests/unit/api/routers/runs.test.ts +++ b/tests/unit/api/routers/runs.test.ts @@ -57,7 +57,7 @@ vi.mock('../../../../src/db/repositories/runsRepository.js', () => ({ })); // Mock getDb for the inline org-access check in getById -const { mockDbSelect, mockDbFrom, mockDbWhere, configureOwnership } = setupOwnershipCheckMock(); +const { mockDbSelect, mockDbFrom, mockDbWhere } = setupOwnershipCheckMock(); vi.mock('../../../../src/db/client.js', () => ({ getDb: () => ({ diff --git a/tests/unit/api/routers/users.test.ts b/tests/unit/api/routers/users.test.ts index 5cd943e3..08c2bd0a 100644 --- a/tests/unit/api/routers/users.test.ts +++ b/tests/unit/api/routers/users.test.ts @@ -1,6 +1,6 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { createMockSuperAdmin, createMockUser } from '../../../helpers/factories.js'; -import { createCallerFor, expectTRPCError } from '../../../helpers/trpcTestHarness.js'; +import { createCallerFor } from '../../../helpers/trpcTestHarness.js'; const { mockListOrgUsers, diff --git a/tests/unit/api/routers/webhookLogs.test.ts b/tests/unit/api/routers/webhookLogs.test.ts index f561dd44..f55e0458 100644 --- a/tests/unit/api/routers/webhookLogs.test.ts +++ b/tests/unit/api/routers/webhookLogs.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockSuperAdmin, createMockUser } from '../../../helpers/factories.js'; import { createCallerFor, expectTRPCError } from '../../../helpers/trpcTestHarness.js'; diff --git a/tests/unit/api/routers/webhooks.test.ts b/tests/unit/api/routers/webhooks.test.ts index 696c726b..5c170c6b 100644 --- a/tests/unit/api/routers/webhooks.test.ts +++ 
b/tests/unit/api/routers/webhooks.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockSuperAdmin, createMockUser } from '../../../helpers/factories.js'; import { createCallerFor, @@ -26,7 +26,7 @@ const { mockFetch: vi.fn(), })); -const { mockDbSelect, mockDbFrom, mockDbWhere, configureOwnership } = setupOwnershipCheckMock(); +const { mockDbSelect, mockDbFrom, mockDbWhere } = setupOwnershipCheckMock(); vi.mock('../../../../src/db/client.js', () => ({ getDb: () => ({ diff --git a/tests/unit/api/trpc.test.ts b/tests/unit/api/trpc.test.ts index f86cda96..7673b9b3 100644 --- a/tests/unit/api/trpc.test.ts +++ b/tests/unit/api/trpc.test.ts @@ -1,11 +1,11 @@ import { TRPCError } from '@trpc/server'; import { describe, expect, it } from 'vitest'; import { - type TRPCContext, adminProcedure, protectedProcedure, router, superAdminProcedure, + type TRPCContext, } from '../../../src/api/trpc.js'; // Create a minimal test router diff --git a/tests/unit/backends/accumulator.test.ts b/tests/unit/backends/accumulator.test.ts index 35e31a59..474125e7 100644 --- a/tests/unit/backends/accumulator.test.ts +++ b/tests/unit/backends/accumulator.test.ts @@ -8,8 +8,8 @@ import { COMPLETED_TASKS_MAX, ProgressAccumulator, RING_BUFFER_MAX, - TEXT_SNIPPETS_MAX, summarizeToolParams, + TEXT_SNIPPETS_MAX, } from '../../../src/backends/progressState/accumulator.js'; import { loadTodos } from '../../../src/gadgets/todo/storage.js'; diff --git a/tests/unit/backends/adapter.test.ts b/tests/unit/backends/adapter.test.ts index 99e3e524..cef03809 100644 --- a/tests/unit/backends/adapter.test.ts +++ b/tests/unit/backends/adapter.test.ts @@ -1,6 +1,6 @@ import { existsSync, readFileSync, writeFileSync } from 'node:fs'; -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; import { mockAgentLoggerModule, 
mockCascadeEnvModule, @@ -120,7 +120,7 @@ import { cleanupLogFile, createFileLogger, } from '../../../src/utils/fileLogger.js'; -import { clearWatchdogCleanup, setWatchdogCleanup } from '../../../src/utils/lifecycle.js'; +import { clearWatchdogCleanup } from '../../../src/utils/lifecycle.js'; import { logger } from '../../../src/utils/logging.js'; import { cleanupTempDir } from '../../../src/utils/repo.js'; @@ -302,7 +302,7 @@ describe('executeWithEngine', () => { }); it('includes log buffer in result', async () => { - const loggerInstance = setupMocks(); + const _loggerInstance = setupMocks(); const engine = makeMockBackend(); vi.mocked(engine.execute).mockResolvedValue({ success: true, diff --git a/tests/unit/backends/agent-profiles.test.ts b/tests/unit/backends/agent-profiles.test.ts index 316ec008..89e3dc30 100644 --- a/tests/unit/backends/agent-profiles.test.ts +++ b/tests/unit/backends/agent-profiles.test.ts @@ -156,8 +156,11 @@ vi.mock('node:child_process', () => ({ })); import { execFileSync } from 'node:child_process'; -import { needsGitStateStopHooks } from '../../../src/agents/definitions/profiles.js'; -import { type AgentProfile, getAgentProfile } from '../../../src/agents/definitions/profiles.js'; +import { + type AgentProfile, + getAgentProfile, + needsGitStateStopHooks, +} from '../../../src/agents/definitions/profiles.js'; import { formatPRComments, formatPRDetails, @@ -172,7 +175,7 @@ import { resolveSquintDbPath } from '../../../src/utils/squintDb.js'; const mockExecFileSync = vi.mocked(execFileSync); const mockResolveSquintDbPath = vi.mocked(resolveSquintDbPath); -const mockReadWorkItem = vi.mocked(readWorkItem); +const _mockReadWorkItem = vi.mocked(readWorkItem); const mockReadWorkItemWithMedia = vi.mocked(readWorkItemWithMedia); const mockGithub = vi.mocked(githubClient); diff --git a/tests/unit/backends/claude-code-hooks.test.ts b/tests/unit/backends/claude-code-hooks.test.ts index 201ad14f..cf06da18 100644 --- 
a/tests/unit/backends/claude-code-hooks.test.ts +++ b/tests/unit/backends/claude-code-hooks.test.ts @@ -5,7 +5,7 @@ import type { PreToolUseHookInput, StopHookInput, } from '@anthropic-ai/claude-agent-sdk'; -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { buildHooks, buildPostToolUseFailureHooks, diff --git a/tests/unit/backends/claude-code-imagePrompt.test.ts b/tests/unit/backends/claude-code-imagePrompt.test.ts index 23c7af05..4c2ac335 100644 --- a/tests/unit/backends/claude-code-imagePrompt.test.ts +++ b/tests/unit/backends/claude-code-imagePrompt.test.ts @@ -1,4 +1,3 @@ -import type { SDKUserMessage } from '@anthropic-ai/claude-agent-sdk'; import type { ContentBlockParam } from '@anthropic-ai/sdk/resources'; import { describe, expect, it, vi } from 'vitest'; import type { ContextImage } from '../../../src/agents/contracts/index.js'; diff --git a/tests/unit/backends/claude-code.test.ts b/tests/unit/backends/claude-code.test.ts index 2d589fd0..c07a82a9 100644 --- a/tests/unit/backends/claude-code.test.ts +++ b/tests/unit/backends/claude-code.test.ts @@ -20,17 +20,17 @@ import { tmpdir } from 'node:os'; import { join } from 'node:path'; import { query } from '@anthropic-ai/claude-agent-sdk'; import { - ClaudeCodeEngine, buildEnv, buildSystemPrompt, buildTaskPrompt, buildToolGuidance, + ClaudeCodeEngine, ensureOnboardingFlag, resolveClaudeModel, } from '../../../src/backends/claude-code/index.js'; import { - CLAUDE_CODE_MODELS, CLAUDE_CODE_MODEL_IDS, + CLAUDE_CODE_MODELS, DEFAULT_CLAUDE_CODE_MODEL, } from '../../../src/backends/claude-code/models.js'; import { resolveClaudeCodeSettings } from '../../../src/backends/claude-code/settings.js'; diff --git a/tests/unit/backends/codex.test.ts b/tests/unit/backends/codex.test.ts index f4db8b4c..a8e77ea4 100644 --- a/tests/unit/backends/codex.test.ts +++ b/tests/unit/backends/codex.test.ts @@ -41,8 +41,8 @@ vi.mock('../../../src/utils/logging.js', 
() => ({ import { buildEnv } from '../../../src/backends/codex/env.js'; import { - CodexEngine, buildArgs, + CodexEngine, extractErrorMessage, extractTextParts, extractToolCall, diff --git a/tests/unit/backends/githubPoster.test.ts b/tests/unit/backends/githubPoster.test.ts index 0e68bc5f..6aac0b44 100644 --- a/tests/unit/backends/githubPoster.test.ts +++ b/tests/unit/backends/githubPoster.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/github/client.js', () => ({ githubClient: { diff --git a/tests/unit/backends/nativeToolRuntime.test.ts b/tests/unit/backends/nativeToolRuntime.test.ts index bc152a68..81637de7 100644 --- a/tests/unit/backends/nativeToolRuntime.test.ts +++ b/tests/unit/backends/nativeToolRuntime.test.ts @@ -1,5 +1,5 @@ -import { constants, accessSync, existsSync, readFileSync } from 'node:fs'; -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { accessSync, constants, existsSync, readFileSync } from 'node:fs'; +import { afterEach, describe, expect, it, vi } from 'vitest'; import { buildNativeToolPath, @@ -71,7 +71,7 @@ describe('createNativeToolRuntimeArtifacts', () => { }); it('cleanup does not throw even if the directory was already removed', () => { - const { shimDir, cleanup } = createNativeToolRuntimeArtifacts(); + const { cleanup } = createNativeToolRuntimeArtifacts(); cleanupFns.push(cleanup); // Remove the directory manually first diff --git a/tests/unit/backends/opencode-stream.test.ts b/tests/unit/backends/opencode-stream.test.ts index d53e35e0..1c85fc7c 100644 --- a/tests/unit/backends/opencode-stream.test.ts +++ b/tests/unit/backends/opencode-stream.test.ts @@ -38,6 +38,7 @@ vi.mock('../../../src/backends/nativeToolRetry.js', () => ({ // ── Imports after mocks ────────────────────────────────────────────────────── +import type { OpenCodeStreamState } from 
'../../../src/backends/opencode/stream.js'; import { appendPartialOutput, getPartialOutput, @@ -46,7 +47,6 @@ import { handleSessionTerminalEvent, reportToolPart, } from '../../../src/backends/opencode/stream.js'; -import type { OpenCodeStreamState } from '../../../src/backends/opencode/stream.js'; import type { AgentExecutionPlan } from '../../../src/backends/types.js'; // ── Helpers ────────────────────────────────────────────────────────────────── @@ -757,11 +757,7 @@ describe('handlePermissionEvent', () => { }); function makePermissionEvent( - overrides: { - sessionID?: string; - permissionId?: string; - type?: string; - } = {}, + overrides: { sessionID?: string; permissionId?: string; type?: string } = {}, ) { return { type: 'permission.updated', diff --git a/tests/unit/backends/opencode.test.ts b/tests/unit/backends/opencode.test.ts index dbd085a3..95ac48ec 100644 --- a/tests/unit/backends/opencode.test.ts +++ b/tests/unit/backends/opencode.test.ts @@ -36,8 +36,8 @@ vi.mock('../../../src/utils/logging.js', () => ({ })); import { - OpenCodeEngine, buildPermissionConfig, + OpenCodeEngine, resolveOpenCodeModel, } from '../../../src/backends/opencode/index.js'; import { DEFAULT_OPENCODE_MODEL } from '../../../src/backends/opencode/models.js'; diff --git a/tests/unit/backends/pmPoster.test.ts b/tests/unit/backends/pmPoster.test.ts index 6bcf4154..e87defd0 100644 --- a/tests/unit/backends/pmPoster.test.ts +++ b/tests/unit/backends/pmPoster.test.ts @@ -30,11 +30,11 @@ vi.mock('../../../src/backends/progressState.js', () => ({ clearProgressCommentId: vi.fn(), })); +import { PMProgressPoster } from '../../../src/backends/progressState/pmPoster.js'; import { readProgressCommentId, writeProgressCommentId, } from '../../../src/backends/progressState.js'; -import { PMProgressPoster } from '../../../src/backends/progressState/pmPoster.js'; import type { PMProvider } from '../../../src/pm/index.js'; import { getPMProviderOrNull } from '../../../src/pm/index.js'; diff --git 
a/tests/unit/backends/postProcess.test.ts b/tests/unit/backends/postProcess.test.ts index 5310f3b4..f9a3ba5f 100644 --- a/tests/unit/backends/postProcess.test.ts +++ b/tests/unit/backends/postProcess.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/utils/logging.js', () => ({ logger: { diff --git a/tests/unit/backends/progressModel.test.ts b/tests/unit/backends/progressModel.test.ts index ad6ecdd9..3f5cbc32 100644 --- a/tests/unit/backends/progressModel.test.ts +++ b/tests/unit/backends/progressModel.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; const mockTextComplete = vi.fn(); vi.mock('llmist', async (importOriginal) => ({ @@ -32,7 +32,7 @@ vi.mock('../../../src/config/agentMessages.js', () => ({ })); import { LLMist } from 'llmist'; -import { type ProgressContext, callProgressModel } from '../../../src/backends/progressModel.js'; +import { callProgressModel, type ProgressContext } from '../../../src/backends/progressModel.js'; const MockLLMist = vi.mocked(LLMist); diff --git a/tests/unit/backends/progressState.test.ts b/tests/unit/backends/progressState.test.ts index dd97365d..8e61e21d 100644 --- a/tests/unit/backends/progressState.test.ts +++ b/tests/unit/backends/progressState.test.ts @@ -1,8 +1,8 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest'; import { - ENV_VAR_NAME, clearProgressCommentId, + ENV_VAR_NAME, readProgressCommentId, writeProgressCommentId, } from '../../../src/backends/progressState.js'; diff --git a/tests/unit/backends/registry.test.ts b/tests/unit/backends/registry.test.ts index 6a229a87..6e3c2933 100644 --- a/tests/unit/backends/registry.test.ts +++ b/tests/unit/backends/registry.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it } from 'vitest'; +import { describe, expect, it } from 'vitest'; 
import { getEngine, getRegisteredEngines, diff --git a/tests/unit/backends/secretBuilder.test.ts b/tests/unit/backends/secretBuilder.test.ts index 78ece655..af1eecea 100644 --- a/tests/unit/backends/secretBuilder.test.ts +++ b/tests/unit/backends/secretBuilder.test.ts @@ -11,8 +11,8 @@ vi.mock('../../../src/github/personas.js', () => ({ import type { AgentProfile } from '../../../src/agents/definitions/profiles.js'; import { ENV_VAR_NAME } from '../../../src/backends/progressState.js'; import { - GITHUB_ACK_COMMENT_ID_ENV_VAR, augmentProjectSecrets, + GITHUB_ACK_COMMENT_ID_ENV_VAR, injectGitHubAckCommentId, injectProgressCommentId, resolveGitHubToken, diff --git a/tests/unit/backends/shared-envBuilder.test.ts b/tests/unit/backends/shared-envBuilder.test.ts index 4ee29f9d..41441774 100644 --- a/tests/unit/backends/shared-envBuilder.test.ts +++ b/tests/unit/backends/shared-envBuilder.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; import { buildEngineEnv } from '../../../src/backends/shared/envBuilder.js'; import { SHARED_ALLOWED_ENV_EXACT, diff --git a/tests/unit/backends/shared-envFilter.test.ts b/tests/unit/backends/shared-envFilter.test.ts index 61ec6894..6f8e63be 100644 --- a/tests/unit/backends/shared-envFilter.test.ts +++ b/tests/unit/backends/shared-envFilter.test.ts @@ -1,10 +1,10 @@ import { describe, expect, it } from 'vitest'; import { GITHUB_ACK_COMMENT_ID_ENV_VAR } from '../../../src/backends/secretBuilder.js'; import { + filterProcessEnv, SHARED_ALLOWED_ENV_EXACT, SHARED_ALLOWED_ENV_PREFIXES, SHARED_BLOCKED_ENV_EXACT, - filterProcessEnv, } from '../../../src/backends/shared/envFilter.js'; describe('filterProcessEnv (shared)', () => { diff --git a/tests/unit/backends/shared-llmCallLogger.test.ts b/tests/unit/backends/shared-llmCallLogger.test.ts index 301beda7..0094c0bd 100644 --- a/tests/unit/backends/shared-llmCallLogger.test.ts 
+++ b/tests/unit/backends/shared-llmCallLogger.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, describe, expect, it, vi } from 'vitest'; import type { LlmCallLogPayload } from '../../../src/backends/shared/llmCallLogger.js'; import { logLlmCall } from '../../../src/backends/shared/llmCallLogger.js'; diff --git a/tests/unit/backends/shared-nativeToolPrompts.test.ts b/tests/unit/backends/shared-nativeToolPrompts.test.ts index 135b13c5..ec11b7f8 100644 --- a/tests/unit/backends/shared-nativeToolPrompts.test.ts +++ b/tests/unit/backends/shared-nativeToolPrompts.test.ts @@ -1,6 +1,5 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; -import type { ToolManifest } from '../../../src/agents/contracts/index.js'; -import type { ContextInjection } from '../../../src/agents/contracts/index.js'; +import type { ContextInjection, ToolManifest } from '../../../src/agents/contracts/index.js'; // Mock contextFiles module to avoid filesystem I/O vi.mock('../../../src/backends/shared/contextFiles.js', () => ({ diff --git a/tests/unit/backends/sidecarManager.test.ts b/tests/unit/backends/sidecarManager.test.ts index a8d94cb4..8a3bfcd9 100644 --- a/tests/unit/backends/sidecarManager.test.ts +++ b/tests/unit/backends/sidecarManager.test.ts @@ -2,7 +2,7 @@ import { existsSync, writeFileSync } from 'node:fs'; import { tmpdir } from 'node:os'; import { join } from 'node:path'; -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/gadgets/sessionState.js', () => ({ REVIEW_SIDECAR_ENV_VAR: 'CASCADE_REVIEW_SIDECAR_PATH', diff --git a/tests/unit/cli/cli-command-factory.test.ts b/tests/unit/cli/cli-command-factory.test.ts index eb2a4664..830a98c0 100644 --- a/tests/unit/cli/cli-command-factory.test.ts +++ b/tests/unit/cli/cli-command-factory.test.ts @@ -41,7 +41,7 @@ import { createCLICommand } from 
'../../../src/gadgets/shared/cliCommandFactory. import type { ToolDefinition } from '../../../src/gadgets/shared/toolDefinition.js'; /** Minimal oclif config to satisfy this.parse() */ -const mockConfig = { runHook: vi.fn().mockResolvedValue({ successes: [], failures: [] }) }; +const _mockConfig = { runHook: vi.fn().mockResolvedValue({ successes: [], failures: [] }) }; let tmpDir: string; diff --git a/tests/unit/cli/dashboard/client.test.ts b/tests/unit/cli/dashboard/client.test.ts index 1cfb4670..7399216a 100644 --- a/tests/unit/cli/dashboard/client.test.ts +++ b/tests/unit/cli/dashboard/client.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('@trpc/client', () => ({ createTRPCClient: vi.fn(() => ({ mock: 'client' })), diff --git a/tests/unit/cli/dashboard/prompts/prompts.test.ts b/tests/unit/cli/dashboard/prompts/prompts.test.ts index c27a4be4..c2150e33 100644 --- a/tests/unit/cli/dashboard/prompts/prompts.test.ts +++ b/tests/unit/cli/dashboard/prompts/prompts.test.ts @@ -26,8 +26,8 @@ vi.mock('node:fs', () => ({ readFileSync: vi.fn().mockReturnValue('template content'), })); -import PromptsDefaultPartial from '../../../../../src/cli/dashboard/prompts/default-partial.js'; import PromptsDefault from '../../../../../src/cli/dashboard/prompts/default.js'; +import PromptsDefaultPartial from '../../../../../src/cli/dashboard/prompts/default-partial.js'; import PromptsGetPartial from '../../../../../src/cli/dashboard/prompts/get-partial.js'; import PromptsListPartials from '../../../../../src/cli/dashboard/prompts/list-partials.js'; import PromptsResetPartial from '../../../../../src/cli/dashboard/prompts/reset-partial.js'; diff --git a/tests/unit/cli/dashboard/spinner.test.ts b/tests/unit/cli/dashboard/spinner.test.ts index 140dfab5..62a5c8c6 100644 --- a/tests/unit/cli/dashboard/spinner.test.ts +++ b/tests/unit/cli/dashboard/spinner.test.ts @@ -1,4 +1,4 @@ -import { 
afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; const mockOraInstance = { start: vi.fn().mockReturnThis(), diff --git a/tests/unit/cli/file-input-flags.test.ts b/tests/unit/cli/file-input-flags.test.ts index 45de6abd..70d40870 100644 --- a/tests/unit/cli/file-input-flags.test.ts +++ b/tests/unit/cli/file-input-flags.test.ts @@ -40,17 +40,16 @@ vi.mock('../../../src/gadgets/github/core/postPRComment.js', () => ({ postPRComment: vi.fn().mockResolvedValue({ id: 123 }), })); -import { createPR } from '../../../src/gadgets/github/core/createPR.js'; -import { postPRComment } from '../../../src/gadgets/github/core/postPRComment.js'; -import { createWorkItem } from '../../../src/gadgets/pm/core/createWorkItem.js'; -import { postComment } from '../../../src/gadgets/pm/core/postComment.js'; -import { updateWorkItem } from '../../../src/gadgets/pm/core/updateWorkItem.js'; - import CreateWorkItem from '../../../src/cli/pm/create-work-item.js'; import PostComment from '../../../src/cli/pm/post-comment.js'; import UpdateWorkItem from '../../../src/cli/pm/update-work-item.js'; import CreatePR from '../../../src/cli/scm/create-pr.js'; import PostPRComment from '../../../src/cli/scm/post-pr-comment.js'; +import { createPR } from '../../../src/gadgets/github/core/createPR.js'; +import { postPRComment } from '../../../src/gadgets/github/core/postPRComment.js'; +import { createWorkItem } from '../../../src/gadgets/pm/core/createWorkItem.js'; +import { postComment } from '../../../src/gadgets/pm/core/postComment.js'; +import { updateWorkItem } from '../../../src/gadgets/pm/core/updateWorkItem.js'; let tmpDir: string; diff --git a/tests/unit/cli/pm/pm-commands.test.ts b/tests/unit/cli/pm/pm-commands.test.ts index 32dfd25e..91b3dcd8 100644 --- a/tests/unit/cli/pm/pm-commands.test.ts +++ b/tests/unit/cli/pm/pm-commands.test.ts @@ -59,14 +59,6 @@ vi.mock('../../../../src/gadgets/pm/core/postComment.js', () => 
({ postComment: vi.fn().mockResolvedValue({ id: 'comment-1' }), })); -import { createWorkItem } from '../../../../src/gadgets/pm/core/createWorkItem.js'; -import { deleteChecklistItem } from '../../../../src/gadgets/pm/core/deleteChecklistItem.js'; -import { listWorkItems } from '../../../../src/gadgets/pm/core/listWorkItems.js'; -import { moveWorkItem } from '../../../../src/gadgets/pm/core/moveWorkItem.js'; -import { postComment } from '../../../../src/gadgets/pm/core/postComment.js'; -import { readWorkItem } from '../../../../src/gadgets/pm/core/readWorkItem.js'; -import { updateChecklistItem } from '../../../../src/gadgets/pm/core/updateChecklistItem.js'; - import CreateWorkItem from '../../../../src/cli/pm/create-work-item.js'; import DeleteChecklistItem from '../../../../src/cli/pm/delete-checklist-item.js'; import ListWorkItems from '../../../../src/cli/pm/list-work-items.js'; @@ -74,6 +66,13 @@ import MoveWorkItem from '../../../../src/cli/pm/move-work-item.js'; import PostComment from '../../../../src/cli/pm/post-comment.js'; import ReadWorkItem from '../../../../src/cli/pm/read-work-item.js'; import UpdateChecklistItem from '../../../../src/cli/pm/update-checklist-item.js'; +import { createWorkItem } from '../../../../src/gadgets/pm/core/createWorkItem.js'; +import { deleteChecklistItem } from '../../../../src/gadgets/pm/core/deleteChecklistItem.js'; +import { listWorkItems } from '../../../../src/gadgets/pm/core/listWorkItems.js'; +import { moveWorkItem } from '../../../../src/gadgets/pm/core/moveWorkItem.js'; +import { postComment } from '../../../../src/gadgets/pm/core/postComment.js'; +import { readWorkItem } from '../../../../src/gadgets/pm/core/readWorkItem.js'; +import { updateChecklistItem } from '../../../../src/gadgets/pm/core/updateChecklistItem.js'; /** Create a fresh minimal oclif config to satisfy this.parse() in each test */ function makeMockConfig() { diff --git a/tests/unit/cli/scm/scm-commands.test.ts 
b/tests/unit/cli/scm/scm-commands.test.ts index e8918ee3..36636de9 100644 --- a/tests/unit/cli/scm/scm-commands.test.ts +++ b/tests/unit/cli/scm/scm-commands.test.ts @@ -63,15 +63,6 @@ vi.mock('../../../../src/gadgets/github/core/updatePRComment.js', () => ({ updatePRComment: vi.fn().mockResolvedValue({ id: 300, body: 'Updated' }), })); -import { getCIRunLogs } from '../../../../src/gadgets/github/core/getCIRunLogs.js'; -import { getPRChecks } from '../../../../src/gadgets/github/core/getPRChecks.js'; -import { getPRComments } from '../../../../src/gadgets/github/core/getPRComments.js'; -import { getPRDetails } from '../../../../src/gadgets/github/core/getPRDetails.js'; -import { getPRDiff } from '../../../../src/gadgets/github/core/getPRDiff.js'; -import { postPRComment } from '../../../../src/gadgets/github/core/postPRComment.js'; -import { replyToReviewComment } from '../../../../src/gadgets/github/core/replyToReviewComment.js'; -import { updatePRComment } from '../../../../src/gadgets/github/core/updatePRComment.js'; - import GetCIRunLogs from '../../../../src/cli/scm/get-ci-run-logs.js'; import GetPRChecks from '../../../../src/cli/scm/get-pr-checks.js'; import GetPRComments from '../../../../src/cli/scm/get-pr-comments.js'; @@ -80,6 +71,14 @@ import GetPRDiff from '../../../../src/cli/scm/get-pr-diff.js'; import PostPRComment from '../../../../src/cli/scm/post-pr-comment.js'; import ReplyToReviewComment from '../../../../src/cli/scm/reply-to-review-comment.js'; import UpdatePRComment from '../../../../src/cli/scm/update-pr-comment.js'; +import { getCIRunLogs } from '../../../../src/gadgets/github/core/getCIRunLogs.js'; +import { getPRChecks } from '../../../../src/gadgets/github/core/getPRChecks.js'; +import { getPRComments } from '../../../../src/gadgets/github/core/getPRComments.js'; +import { getPRDetails } from '../../../../src/gadgets/github/core/getPRDetails.js'; +import { getPRDiff } from '../../../../src/gadgets/github/core/getPRDiff.js'; +import { 
postPRComment } from '../../../../src/gadgets/github/core/postPRComment.js'; +import { replyToReviewComment } from '../../../../src/gadgets/github/core/replyToReviewComment.js'; +import { updatePRComment } from '../../../../src/gadgets/github/core/updatePRComment.js'; /** Create a fresh minimal oclif config to satisfy this.parse() in each test */ function makeMockConfig() { diff --git a/tests/unit/config/agentMessages.test.ts b/tests/unit/config/agentMessages.test.ts index 231f37ba..477b28a1 100644 --- a/tests/unit/config/agentMessages.test.ts +++ b/tests/unit/config/agentMessages.test.ts @@ -11,11 +11,11 @@ vi.mock('../../../src/agents/definitions/index.js', () => ({ })); import { + _resetAgentMessages, AGENT_LABELS, AGENT_ROLE_HINTS, - INITIAL_MESSAGES, - _resetAgentMessages, getAgentLabel, + INITIAL_MESSAGES, initAgentMessages, } from '../../../src/config/agentMessages.js'; diff --git a/tests/unit/config/integrationRoles.test.ts b/tests/unit/config/integrationRoles.test.ts index 8132e14c..42ee7882 100644 --- a/tests/unit/config/integrationRoles.test.ts +++ b/tests/unit/config/integrationRoles.test.ts @@ -1,11 +1,11 @@ import { afterEach, describe, expect, it } from 'vitest'; import { + getCredentialRoles, type IntegrationCategory, type IntegrationProvider, PROVIDER_CATEGORY, PROVIDER_CREDENTIAL_ROLES, - getCredentialRoles, registerCredentialRoles, } from '../../../src/config/integrationRoles.js'; @@ -28,7 +28,7 @@ describe.concurrent('PROVIDER_CATEGORY', () => { it('maps all known providers to valid categories', () => { const validCategories: IntegrationCategory[] = ['pm', 'scm', 'alerting']; - for (const [provider, category] of Object.entries(PROVIDER_CATEGORY)) { + for (const [_provider, category] of Object.entries(PROVIDER_CATEGORY)) { expect(validCategories).toContain(category); } }); diff --git a/tests/unit/config/projects.test.ts b/tests/unit/config/projects.test.ts index 6fb99e6c..06b1a295 100644 --- a/tests/unit/config/projects.test.ts +++ 
b/tests/unit/config/projects.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; // Mock the DB repositories vi.mock('../../../src/db/repositories/configRepository.js', () => ({ diff --git a/tests/unit/config/provider.test.ts b/tests/unit/config/provider.test.ts index 40b60011..35e0a815 100644 --- a/tests/unit/config/provider.test.ts +++ b/tests/unit/config/provider.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, describe, expect, it, vi } from 'vitest'; // Mock DB repositories first (must be before imports) vi.mock('../../../src/db/repositories/configRepository.js', () => ({ diff --git a/tests/unit/config/rateLimits.test.ts b/tests/unit/config/rateLimits.test.ts index f90f1f26..d6df07bb 100644 --- a/tests/unit/config/rateLimits.test.ts +++ b/tests/unit/config/rateLimits.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it } from 'vitest'; -import { MODEL_RATE_LIMITS, getRateLimitForModel } from '../../../src/config/rateLimits.js'; +import { getRateLimitForModel, MODEL_RATE_LIMITS } from '../../../src/config/rateLimits.js'; describe.concurrent('config/rateLimits', () => { describe('getRateLimitForModel', () => { @@ -101,7 +101,7 @@ describe.concurrent('config/rateLimits', () => { }); it('safety margin is between 0 and 1', () => { - for (const [modelId, config] of Object.entries(MODEL_RATE_LIMITS)) { + for (const [_modelId, config] of Object.entries(MODEL_RATE_LIMITS)) { expect(config.safetyMargin).toBeGreaterThan(0); expect(config.safetyMargin).toBeLessThanOrEqual(1); } diff --git a/tests/unit/config/reviewConfig.test.ts b/tests/unit/config/reviewConfig.test.ts index 280fbd3d..c544f38b 100644 --- a/tests/unit/config/reviewConfig.test.ts +++ b/tests/unit/config/reviewConfig.test.ts @@ -1,8 +1,8 @@ import { describe, expect, it } from 'vitest'; import { - 
REVIEW_FILE_CONTENT_TOKEN_LIMIT, estimateTokens, + REVIEW_FILE_CONTENT_TOKEN_LIMIT, } from '../../../src/config/reviewConfig.js'; describe.concurrent('config/reviewConfig', () => { diff --git a/tests/unit/config/statusUpdateConfig.test.ts b/tests/unit/config/statusUpdateConfig.test.ts index 7cae819d..1470e21c 100644 --- a/tests/unit/config/statusUpdateConfig.test.ts +++ b/tests/unit/config/statusUpdateConfig.test.ts @@ -1,4 +1,4 @@ -import { afterEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { getAgentLabel } from '../../../src/config/agentMessages.js'; import { diff --git a/tests/unit/db/client.test.ts b/tests/unit/db/client.test.ts index a2cc6e44..63f22275 100644 --- a/tests/unit/db/client.test.ts +++ b/tests/unit/db/client.test.ts @@ -32,10 +32,10 @@ vi.mock('node:fs', () => ({ // ── Imports (after mocks) ───────────────────────────────────────────────────── import { - DatabaseContext, _setTestDb, closeDb, createDatabaseContext, + DatabaseContext, getDb, setDefaultDatabaseContext, } from '../../../src/db/client.js'; diff --git a/tests/unit/db/crypto.test.ts b/tests/unit/db/crypto.test.ts index 68edac00..5bdc0b9c 100644 --- a/tests/unit/db/crypto.test.ts +++ b/tests/unit/db/crypto.test.ts @@ -1,5 +1,5 @@ import { randomBytes } from 'node:crypto'; -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; import { decryptCredential, encryptCredential, diff --git a/tests/unit/db/repositories/configMapper.test.ts b/tests/unit/db/repositories/configMapper.test.ts index 42e0bbc2..e422ebf2 100644 --- a/tests/unit/db/repositories/configMapper.test.ts +++ b/tests/unit/db/repositories/configMapper.test.ts @@ -2,10 +2,10 @@ import { describe, expect, it } from 'vitest'; import { type AgentConfigRow, - type IntegrationRow, - type MapProjectInput, buildAgentMaps, extractIntegrationConfigs, + type IntegrationRow, + type MapProjectInput, 
mapProjectRow, orUndefined, } from '../../../../src/db/repositories/configMapper.js'; diff --git a/tests/unit/db/repositories/configRepository.test.ts b/tests/unit/db/repositories/configRepository.test.ts index 16231957..58481daa 100644 --- a/tests/unit/db/repositories/configRepository.test.ts +++ b/tests/unit/db/repositories/configRepository.test.ts @@ -1,4 +1,4 @@ -import { afterEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { mockDbClientModule, mockGetDb } from '../../../helpers/sharedMocks.js'; vi.mock('../../../../src/db/client.js', () => mockDbClientModule); diff --git a/tests/unit/db/repositories/runStatsRepository.test.ts b/tests/unit/db/repositories/runStatsRepository.test.ts index 4e11bace..432fd0d4 100644 --- a/tests/unit/db/repositories/runStatsRepository.test.ts +++ b/tests/unit/db/repositories/runStatsRepository.test.ts @@ -50,8 +50,6 @@ vi.mock('../../../../src/db/repositories/joinHelpers.js', () => ({ buildAgentRunWorkItemJoin: () => 'mock-join-condition', })); -import { mockGetDb } from '../../../helpers/sharedMocks.js'; - import { type AggregatedProjectStats, getProjectWorkStats, @@ -61,6 +59,7 @@ import { listProjectsForOrg, listRuns, } from '../../../../src/db/repositories/runStatsRepository.js'; +import { mockGetDb } from '../../../helpers/sharedMocks.js'; // ============================================================================ // Test helper diff --git a/tests/unit/db/repositories/runsRepository-concurrency.test.ts b/tests/unit/db/repositories/runsRepository-concurrency.test.ts index 744b631b..4e97fd00 100644 --- a/tests/unit/db/repositories/runsRepository-concurrency.test.ts +++ b/tests/unit/db/repositories/runsRepository-concurrency.test.ts @@ -79,8 +79,6 @@ vi.mock('../../../../src/db/repositories/runStatsRepository.js', () => ({ getProjectWorkStatsAggregated: vi.fn(), })); -import { mockGetDb } from '../../../helpers/sharedMocks.js'; - import { cancelRunById, 
countActiveRuns, @@ -88,6 +86,7 @@ import { failOrphanedRunFallback, hasActiveRunForWorkItem, } from '../../../../src/db/repositories/runsRepository.js'; +import { mockGetDb } from '../../../helpers/sharedMocks.js'; // ============================================================================ // Test helpers diff --git a/tests/unit/db/repositories/runsRepository.dashboard.test.ts b/tests/unit/db/repositories/runsRepository.dashboard.test.ts index 3cf1d2f8..3a0332fa 100644 --- a/tests/unit/db/repositories/runsRepository.dashboard.test.ts +++ b/tests/unit/db/repositories/runsRepository.dashboard.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; const mockSelect = vi.fn(); diff --git a/tests/unit/db/repositories/webhookLogsRepository.test.ts b/tests/unit/db/repositories/webhookLogsRepository.test.ts index 4fc55c43..8d6a203a 100644 --- a/tests/unit/db/repositories/webhookLogsRepository.test.ts +++ b/tests/unit/db/repositories/webhookLogsRepository.test.ts @@ -1,5 +1,4 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; -import { createMockDbWithGetDb } from '../../../helpers/mockDb.js'; import { mockDbClientModule } from '../../../helpers/sharedMocks.js'; vi.mock('../../../../src/db/client.js', () => mockDbClientModule); @@ -22,8 +21,6 @@ vi.mock('../../../../src/db/schema/index.js', () => ({ }, })); -import { mockGetDb } from '../../../helpers/sharedMocks.js'; - import { getWebhookLogById, getWebhookLogStats, @@ -31,6 +28,7 @@ import { listWebhookLogs, pruneWebhookLogs, } from '../../../../src/db/repositories/webhookLogsRepository.js'; +import { mockGetDb } from '../../../helpers/sharedMocks.js'; // Helper to build a chainable mock db function buildMockDb() { diff --git a/tests/unit/gadgets/astGrep.test.ts b/tests/unit/gadgets/astGrep.test.ts index 34634707..508aa051 100644 --- a/tests/unit/gadgets/astGrep.test.ts +++ b/tests/unit/gadgets/astGrep.test.ts @@ -54,7 
+54,6 @@ vi.mock('../../../src/gadgets/shared/postEditChecks.js', () => ({ import { AstGrep } from '../../../src/gadgets/AstGrep.js'; import { assertFileRead } from '../../../src/gadgets/readTracking.js'; import { getSessionState } from '../../../src/gadgets/sessionState.js'; -import { validatePath } from '../../../src/gadgets/shared/pathValidation.js'; import { runPostEditChecks } from '../../../src/gadgets/shared/postEditChecks.js'; const mockGetSessionState = vi.mocked(getSessionState); diff --git a/tests/unit/gadgets/github.test.ts b/tests/unit/gadgets/github.test.ts index 5ea792fe..66c530f6 100644 --- a/tests/unit/gadgets/github.test.ts +++ b/tests/unit/gadgets/github.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { mockGitHubClientModule } from '../../helpers/sharedMocks.js'; // Mock session state @@ -37,7 +37,7 @@ function mockRunCommand( args?: string[], ) => Promise<{ stdout: string; stderr: string; exitCode: number }>, ) { - vi.mocked(runCommand).mockImplementation(async (cmd, args, cwd) => { + vi.mocked(runCommand).mockImplementation(async (cmd, args, _cwd) => { // Auto-detect owner/repo from git remote if (args?.[0] === 'remote') { return { stdout: REMOTE_URL, stderr: '', exitCode: 0 }; diff --git a/tests/unit/gadgets/github/core/createPR.test.ts b/tests/unit/gadgets/github/core/createPR.test.ts index edb73a48..ca4f490b 100644 --- a/tests/unit/gadgets/github/core/createPR.test.ts +++ b/tests/unit/gadgets/github/core/createPR.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../../../src/github/client.js', () => ({ githubClient: { @@ -54,7 +54,7 @@ describe('detectOwnerRepo (tested through createPR)', () => { htmlUrl: 'https://github.com/test-owner/test-repo/pull/1', } as Awaited>); - const result = await createPR({ + const _result = await createPR({ 
title: 'Test', body: 'Body', head: 'feat', @@ -152,7 +152,7 @@ describe('detectOwnerRepo (tested through createPR)', () => { describe('stageAndCommit (tested through createPR)', () => { it('stages tracked changes and commits', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') { return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; @@ -189,7 +189,7 @@ describe('stageAndCommit (tested through createPR)', () => { it('stages untracked files individually', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') { return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; @@ -226,7 +226,7 @@ describe('stageAndCommit (tested through createPR)', () => { it('skips commit when nothing staged', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') { return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; @@ -280,7 +280,7 @@ describe('stageAndCommit (tested through createPR)', () => { describe('pushBranch (tested through createPR)', () => { it('pushes with -u origin flag', async () => { const calls: string[][] = []; - mockGitCommands((cmd, args) => { + mockGitCommands((_cmd, args) => { calls.push(args || []); if (args?.[0] === 'status' && args?.[1] === '--porcelain') { return { stdout: '', stderr: '', exitCode: 0 }; @@ -320,7 +320,7 @@ describe('pushBranch (tested through createPR)', () => { describe('verifyBranchOnRemote (tested through createPR)', () => { it('throws when branch not on remote', async () => { - mockGitCommands((cmd, args) => { + mockGitCommands((_cmd, args) => { if (args?.[0] === 
'ls-remote') { return { stdout: '', stderr: '', exitCode: 0 }; // empty = not found } @@ -342,7 +342,7 @@ describe('verifyBranchOnRemote (tested through createPR)', () => { describe('createPR', () => { function setupSuccessfulGitCommands() { - mockGitCommands((cmd, args) => { + mockGitCommands((_cmd, args) => { if (args?.[0] === 'status' && args?.[1] === '--porcelain') { return { stdout: '', stderr: '', exitCode: 0 }; } @@ -355,7 +355,7 @@ describe('createPR', () => { it('commits and pushes by default', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; if (args?.[0] === 'status' && args?.[1] === '--porcelain') @@ -379,7 +379,7 @@ describe('createPR', () => { it('skips commit when commit=false', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; if (args?.[0] === 'ls-remote') @@ -400,7 +400,7 @@ describe('createPR', () => { it('skips push when push=false', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; if (args?.[0] === 'ls-remote') @@ -499,7 +499,7 @@ describe('createPR', () => { it('uses custom commitMessage when provided', async () => { const calls: string[][] = []; - mockRunCommand.mockImplementation(async (cmd, args) => { + mockRunCommand.mockImplementation(async (_cmd, args) => { calls.push(args || []); if (args?.[0] === 'remote') return { stdout: HTTPS_URL, stderr: '', exitCode: 0 }; if (args?.[0] === 'status' 
&& args?.[1] === '--porcelain') diff --git a/tests/unit/gadgets/github/core/misc.test.ts b/tests/unit/gadgets/github/core/misc.test.ts index 774d27dd..6ada4343 100644 --- a/tests/unit/gadgets/github/core/misc.test.ts +++ b/tests/unit/gadgets/github/core/misc.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../../../src/github/client.js', () => ({ githubClient: { diff --git a/tests/unit/gadgets/pm/core/addChecklist.test.ts b/tests/unit/gadgets/pm/core/addChecklist.test.ts index db59eeb1..f18b2961 100644 --- a/tests/unit/gadgets/pm/core/addChecklist.test.ts +++ b/tests/unit/gadgets/pm/core/addChecklist.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/createWorkItem.test.ts b/tests/unit/gadgets/pm/core/createWorkItem.test.ts index 4196cea8..3ad63ae3 100644 --- a/tests/unit/gadgets/pm/core/createWorkItem.test.ts +++ b/tests/unit/gadgets/pm/core/createWorkItem.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/deleteChecklistItem.test.ts b/tests/unit/gadgets/pm/core/deleteChecklistItem.test.ts index cd3c84f9..d3168616 100644 --- a/tests/unit/gadgets/pm/core/deleteChecklistItem.test.ts +++ b/tests/unit/gadgets/pm/core/deleteChecklistItem.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/listWorkItems.test.ts 
b/tests/unit/gadgets/pm/core/listWorkItems.test.ts index 33bc5f1f..144e9747 100644 --- a/tests/unit/gadgets/pm/core/listWorkItems.test.ts +++ b/tests/unit/gadgets/pm/core/listWorkItems.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/moveWorkItem.test.ts b/tests/unit/gadgets/pm/core/moveWorkItem.test.ts index ec721a0c..6a28a580 100644 --- a/tests/unit/gadgets/pm/core/moveWorkItem.test.ts +++ b/tests/unit/gadgets/pm/core/moveWorkItem.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/readWorkItem.test.ts b/tests/unit/gadgets/pm/core/readWorkItem.test.ts index ba1f5238..eb88cfaf 100644 --- a/tests/unit/gadgets/pm/core/readWorkItem.test.ts +++ b/tests/unit/gadgets/pm/core/readWorkItem.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/updateChecklistItem.test.ts b/tests/unit/gadgets/pm/core/updateChecklistItem.test.ts index 7065812e..99619a59 100644 --- a/tests/unit/gadgets/pm/core/updateChecklistItem.test.ts +++ b/tests/unit/gadgets/pm/core/updateChecklistItem.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/pm/core/updateWorkItem.test.ts b/tests/unit/gadgets/pm/core/updateWorkItem.test.ts index 21291f25..6a28eb65 100644 --- 
a/tests/unit/gadgets/pm/core/updateWorkItem.test.ts +++ b/tests/unit/gadgets/pm/core/updateWorkItem.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { createMockPMProvider } from '../../../../helpers/mockPMProvider.js'; diff --git a/tests/unit/gadgets/session/core/finish.test.ts b/tests/unit/gadgets/session/core/finish.test.ts index f38f8f34..f41729d4 100644 --- a/tests/unit/gadgets/session/core/finish.test.ts +++ b/tests/unit/gadgets/session/core/finish.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../../../src/github/client.js', () => ({ githubClient: { diff --git a/tests/unit/gadgets/sessionState.test.ts b/tests/unit/gadgets/sessionState.test.ts index 758b1360..05b54888 100644 --- a/tests/unit/gadgets/sessionState.test.ts +++ b/tests/unit/gadgets/sessionState.test.ts @@ -12,7 +12,6 @@ vi.mock('../../../src/github/client.js', () => ({ })); import { - SessionState, clearInitialComment, createSessionState, deleteInitialComment, @@ -26,6 +25,7 @@ import { recordInitialComment, recordPRCreation, recordReviewSubmission, + SessionState, setDefaultSessionState, } from '../../../src/gadgets/sessionState.js'; diff --git a/tests/unit/gadgets/shared/factories.test.ts b/tests/unit/gadgets/shared/factories.test.ts index 0d84de97..79ea8492 100644 --- a/tests/unit/gadgets/shared/factories.test.ts +++ b/tests/unit/gadgets/shared/factories.test.ts @@ -38,9 +38,9 @@ import { createCLICommand, } from '../../../../src/gadgets/shared/cliCommandFactory.js'; import { - type GadgetCoreFn, buildZodSchema, createGadgetClass, + type GadgetCoreFn, } from '../../../../src/gadgets/shared/gadgetFactory.js'; import { generateToolManifest } from '../../../../src/gadgets/shared/manifestGenerator.js'; import type { ToolDefinition } from '../../../../src/gadgets/shared/toolDefinition.js'; diff 
--git a/tests/unit/gadgets/shared/pathValidation.test.ts b/tests/unit/gadgets/shared/pathValidation.test.ts index 2a285939..08f6d584 100644 --- a/tests/unit/gadgets/shared/pathValidation.test.ts +++ b/tests/unit/gadgets/shared/pathValidation.test.ts @@ -16,7 +16,7 @@ vi.mock('../../../../src/utils/repo.js', () => ({ import { validatePath } from '../../../../src/gadgets/shared/pathValidation.js'; describe('validatePath', () => { - const originalCwd = process.cwd(); + const _originalCwd = process.cwd(); beforeEach(() => { vi.resetAllMocks(); diff --git a/tests/unit/gadgets/tmux.test.ts b/tests/unit/gadgets/tmux.test.ts index 5626d043..91a213a3 100644 --- a/tests/unit/gadgets/tmux.test.ts +++ b/tests/unit/gadgets/tmux.test.ts @@ -1,17 +1,17 @@ import { resolve } from 'node:path'; import { describe, expect, it, vi } from 'vitest'; -import { - Tmux, - consumePendingSessionNotices, - resolveWorkingDirectory, - validateGitCommand, -} from '../../../src/gadgets/tmux.js'; import { sanitizeSessionName, sleep, stripAnsi, unescapeOutput, } from '../../../src/gadgets/tmux/utils.js'; +import { + consumePendingSessionNotices, + resolveWorkingDirectory, + Tmux, + validateGitCommand, +} from '../../../src/gadgets/tmux.js'; describe('Tmux Gadget', () => { describe('resolveWorkingDirectory', () => { diff --git a/tests/unit/gadgets/tmux/TmuxControlClient.test.ts b/tests/unit/gadgets/tmux/TmuxControlClient.test.ts index edb9a872..430a2fef 100644 --- a/tests/unit/gadgets/tmux/TmuxControlClient.test.ts +++ b/tests/unit/gadgets/tmux/TmuxControlClient.test.ts @@ -1,5 +1,5 @@ import { EventEmitter } from 'node:events'; -import { type MockInstance, afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, beforeEach, describe, expect, it, type MockInstance, vi } from 'vitest'; import { EXIT_MARKER_PREFIX, EXIT_MARKER_SUFFIX } from '../../../../src/gadgets/tmux/constants.js'; // ─── Mock readline ───────────────────────────────────────────────────────────── diff 
--git a/tests/unit/gadgets/tmux/TmuxGadget.test.ts b/tests/unit/gadgets/tmux/TmuxGadget.test.ts index 3431f809..3fc44581 100644 --- a/tests/unit/gadgets/tmux/TmuxGadget.test.ts +++ b/tests/unit/gadgets/tmux/TmuxGadget.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; import { CommandFailedError } from '../../../../src/gadgets/tmux/errors.js'; // ─── Mock the control client module ─────────────────────────────────────────── diff --git a/tests/unit/gadgets/todo-storage.test.ts b/tests/unit/gadgets/todo-storage.test.ts index e0538751..67503ccb 100644 --- a/tests/unit/gadgets/todo-storage.test.ts +++ b/tests/unit/gadgets/todo-storage.test.ts @@ -13,13 +13,13 @@ vi.mock('../../../src/utils/repo.js', () => ({ import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; import { - type Todo, formatTodoList, getNextId, getSessionId, initTodoSession, loadTodos, saveTodos, + type Todo, } from '../../../src/gadgets/todo/storage.js'; describe('todo storage', () => { diff --git a/tests/unit/gadgets/todo.test.ts b/tests/unit/gadgets/todo.test.ts index 1e709da1..a93e3d10 100644 --- a/tests/unit/gadgets/todo.test.ts +++ b/tests/unit/gadgets/todo.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; import { TodoUpdateStatus } from '../../../src/gadgets/todo/TodoUpdateStatus.js'; import { TodoUpsert } from '../../../src/gadgets/todo/TodoUpsert.js'; @@ -20,7 +20,7 @@ vi.mock('../../../src/gadgets/todo/storage.js', () => { }), getNextId: vi.fn((existingTodos) => { const maxId = existingTodos.reduce( - (max: number, t: { id: string }) => Math.max(max, Number.parseInt(t.id) || 0), + (max: number, t: { id: string }) => Math.max(max, Number.parseInt(t.id, 10) || 0), 0, ); return String(maxId + 1); diff --git a/tests/unit/gadgets/todo/todoDelete.test.ts 
b/tests/unit/gadgets/todo/todoDelete.test.ts index 8d50f9ce..5ea64fc2 100644 --- a/tests/unit/gadgets/todo/todoDelete.test.ts +++ b/tests/unit/gadgets/todo/todoDelete.test.ts @@ -6,8 +6,8 @@ vi.mock('../../../../src/gadgets/todo/storage.js', () => ({ formatTodoList: vi.fn(), })); -import { TodoDelete } from '../../../../src/gadgets/todo/TodoDelete.js'; import { formatTodoList, loadTodos, saveTodos } from '../../../../src/gadgets/todo/storage.js'; +import { TodoDelete } from '../../../../src/gadgets/todo/TodoDelete.js'; const mockLoadTodos = vi.mocked(loadTodos); const mockSaveTodos = vi.mocked(saveTodos); diff --git a/tests/unit/github/client.test.ts b/tests/unit/github/client.test.ts index 113fab38..d434d47d 100644 --- a/tests/unit/github/client.test.ts +++ b/tests/unit/github/client.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // Mock Octokit before importing client const mockPulls = { @@ -52,14 +52,13 @@ vi.mock('../../../src/utils/logging.js', () => ({ }, })); +import { Octokit } from '@octokit/rest'; import { getGitHubUserForToken, githubClient, withGitHubToken, } from '../../../src/github/client.js'; -import { Octokit } from '@octokit/rest'; - describe('githubClient', () => { describe('getClient throws without scope', () => { it('throws when no withGitHubToken scope is active', async () => { @@ -808,7 +807,7 @@ describe('githubClient', () => { }); }); - it('falls back to Run #${id} when run name is null', async () => { + it('falls back to Run # when run name is null', async () => { mockActions.listWorkflowRunsForRepo.mockResolvedValue({ data: { workflow_runs: [{ id: 99, name: null, conclusion: 'failure' }], diff --git a/tests/unit/github/integration.test.ts b/tests/unit/github/integration.test.ts index ecff1f21..23760389 100644 --- a/tests/unit/github/integration.test.ts +++ b/tests/unit/github/integration.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, 
describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // --------------------------------------------------------------------------- // Mocks diff --git a/tests/unit/github/personas.test.ts b/tests/unit/github/personas.test.ts index 1b54958f..bbe33812 100644 --- a/tests/unit/github/personas.test.ts +++ b/tests/unit/github/personas.test.ts @@ -20,6 +20,7 @@ vi.mock('../../../src/utils/logging.js', () => ({ import { getIntegrationCredential } from '../../../src/config/provider.js'; import { getGitHubUserForToken } from '../../../src/github/client.js'; +import type { PersonaIdentities } from '../../../src/github/personas.js'; import { _resetPersonaIdentityCache, getPersonaForAgentType, @@ -28,7 +29,6 @@ import { isCascadeBot, resolvePersonaIdentities, } from '../../../src/github/personas.js'; -import type { PersonaIdentities } from '../../../src/github/personas.js'; describe('personas', () => { beforeEach(() => { diff --git a/tests/unit/instrument.test.ts b/tests/unit/instrument.test.ts index 039b39ad..7ff07db6 100644 --- a/tests/unit/instrument.test.ts +++ b/tests/unit/instrument.test.ts @@ -15,18 +15,13 @@ describe('instrument (Sentry init)', () => { }); afterEach(() => { - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_DSN; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_ENVIRONMENT; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_RELEASE; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_TRACES_SAMPLE_RATE; }); it('does NOT call Sentry.init when SENTRY_DSN is unset', async () => { - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_DSN; await import('../../src/instrument.js'); 
expect(mockInit).not.toHaveBeenCalled(); @@ -49,7 +44,6 @@ describe('instrument (Sentry init)', () => { it('falls back to NODE_ENV for environment', async () => { process.env.SENTRY_DSN = 'https://fake@sentry.io/123'; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_ENVIRONMENT; const originalNodeEnv = process.env.NODE_ENV; process.env.NODE_ENV = 'test'; @@ -67,7 +61,6 @@ describe('instrument (Sentry init)', () => { it('defaults tracesSampleRate to 0.1', async () => { process.env.SENTRY_DSN = 'https://fake@sentry.io/123'; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_TRACES_SAMPLE_RATE; await import('../../src/instrument.js'); expect(mockInit).toHaveBeenCalledWith(expect.objectContaining({ tracesSampleRate: 0.1 })); diff --git a/tests/unit/pm/context.test.ts b/tests/unit/pm/context.test.ts index fd9b95d5..d47bed03 100644 --- a/tests/unit/pm/context.test.ts +++ b/tests/unit/pm/context.test.ts @@ -171,7 +171,7 @@ describe('pm/context', () => { await withPMProvider(innerProvider, async () => { throw new Error('Inner error'); }); - } catch (error) { + } catch (_error) { // Expected error } diff --git a/tests/unit/pm/media.test.ts b/tests/unit/pm/media.test.ts index b7f7a062..cf095a73 100644 --- a/tests/unit/pm/media.test.ts +++ b/tests/unit/pm/media.test.ts @@ -1,11 +1,11 @@ import { afterEach, describe, expect, it, vi } from 'vitest'; import { - MAX_IMAGES_PER_WORK_ITEM, - MAX_IMAGE_SIZE_BYTES, downloadMedia, extractMarkdownImages, filterImageMedia, isImageMimeType, + MAX_IMAGE_SIZE_BYTES, + MAX_IMAGES_PER_WORK_ITEM, resolveJiraMediaUrls, } from '../../../src/pm/media.js'; import type { MediaReference } from '../../../src/pm/types.js'; diff --git a/tests/unit/queue/cancel.test.ts b/tests/unit/queue/cancel.test.ts index be1f3706..296b4eb4 100644 --- a/tests/unit/queue/cancel.test.ts +++ b/tests/unit/queue/cancel.test.ts @@ -90,7 +90,6 @@ 
describe('publishCancelCommand', () => { it('throws an error when REDIS_URL is not set', async () => { const saved = process.env.REDIS_URL; - // biome-ignore lint/performance/noDelete: need to fully remove the key delete process.env.REDIS_URL; try { @@ -298,7 +297,6 @@ describe('subscribeToCancelCommands', () => { it('throws an error when REDIS_URL is not set', async () => { const saved = process.env.REDIS_URL; - // biome-ignore lint/performance/noDelete: need to fully remove the key delete process.env.REDIS_URL; try { diff --git a/tests/unit/queue/client.test.ts b/tests/unit/queue/client.test.ts index 9058d86f..67e8018f 100644 --- a/tests/unit/queue/client.test.ts +++ b/tests/unit/queue/client.test.ts @@ -172,7 +172,6 @@ describe('getQueue error handling', () => { it('throws an error when REDIS_URL is not set', async () => { // Ensure REDIS_URL is not present in the environment const saved = process.env.REDIS_URL; - // biome-ignore lint/performance/noDelete: need to fully remove the key so !redisUrl is true delete process.env.REDIS_URL; try { diff --git a/tests/unit/router/ackMessageGenerator.test.ts b/tests/unit/router/ackMessageGenerator.test.ts index 25a5f89c..eddc22cb 100644 --- a/tests/unit/router/ackMessageGenerator.test.ts +++ b/tests/unit/router/ackMessageGenerator.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // Mock heavy imports before importing the module under test const mockTextComplete = vi.fn(); diff --git a/tests/unit/router/adapters/github.test.ts b/tests/unit/router/adapters/github.test.ts index 029c2aa0..a7a92407 100644 --- a/tests/unit/router/adapters/github.test.ts +++ b/tests/unit/router/adapters/github.test.ts @@ -83,8 +83,8 @@ import { resolveGitHubTokenForAckByAgent, } from '../../../../src/router/acknowledgments.js'; import { GitHubRouterAdapter, injectEventType } from '../../../../src/router/adapters/github.js'; -import { loadProjectConfig } 
from '../../../../src/router/config.js'; import type { RouterProjectConfig } from '../../../../src/router/config.js'; +import { loadProjectConfig } from '../../../../src/router/config.js'; import { extractPRNumber } from '../../../../src/router/notifications.js'; import { addEyesReactionToPR } from '../../../../src/router/pre-actions.js'; import type { GitHubJob } from '../../../../src/router/queue.js'; diff --git a/tests/unit/router/adapters/jira.test.ts b/tests/unit/router/adapters/jira.test.ts index e207f7c4..775c9459 100644 --- a/tests/unit/router/adapters/jira.test.ts +++ b/tests/unit/router/adapters/jira.test.ts @@ -44,8 +44,8 @@ vi.mock('../../../../src/jira/client.js', () => ({ import { postJiraAck, resolveJiraBotAccountId } from '../../../../src/router/acknowledgments.js'; import { JiraRouterAdapter } from '../../../../src/router/adapters/jira.js'; -import { loadProjectConfig } from '../../../../src/router/config.js'; import type { RouterProjectConfig } from '../../../../src/router/config.js'; +import { loadProjectConfig } from '../../../../src/router/config.js'; import { resolveJiraCredentials } from '../../../../src/router/platformClients/index.js'; import { sendAcknowledgeReaction } from '../../../../src/router/reactions.js'; import type { TriggerRegistry } from '../../../../src/triggers/registry.js'; diff --git a/tests/unit/router/adapters/sentry.test.ts b/tests/unit/router/adapters/sentry.test.ts index a45054c5..9ffd17e2 100644 --- a/tests/unit/router/adapters/sentry.test.ts +++ b/tests/unit/router/adapters/sentry.test.ts @@ -8,8 +8,8 @@ vi.mock('../../../../src/router/config.js', () => ({ })); import { SentryRouterAdapter } from '../../../../src/router/adapters/sentry.js'; -import { loadProjectConfig } from '../../../../src/router/config.js'; import type { RouterProjectConfig } from '../../../../src/router/config.js'; +import { loadProjectConfig } from '../../../../src/router/config.js'; import type { SentryJob } from 
'../../../../src/router/queue.js'; import type { TriggerRegistry } from '../../../../src/triggers/registry.js'; diff --git a/tests/unit/router/adapters/trello.test.ts b/tests/unit/router/adapters/trello.test.ts index ed44a7a6..57e2945a 100644 --- a/tests/unit/router/adapters/trello.test.ts +++ b/tests/unit/router/adapters/trello.test.ts @@ -46,8 +46,8 @@ vi.mock('../../../../src/router/trello.js', () => ({ import { postTrelloAck } from '../../../../src/router/acknowledgments.js'; import { TrelloRouterAdapter } from '../../../../src/router/adapters/trello.js'; -import { loadProjectConfig } from '../../../../src/router/config.js'; import type { RouterProjectConfig } from '../../../../src/router/config.js'; +import { loadProjectConfig } from '../../../../src/router/config.js'; import { resolveTrelloCredentials } from '../../../../src/router/platformClients/index.js'; import { sendAcknowledgeReaction } from '../../../../src/router/reactions.js'; import { isCardInTriggerList, isSelfAuthoredTrelloComment } from '../../../../src/router/trello.js'; diff --git a/tests/unit/router/config.test.ts b/tests/unit/router/config.test.ts index 6911d975..9699447e 100644 --- a/tests/unit/router/config.test.ts +++ b/tests/unit/router/config.test.ts @@ -17,11 +17,7 @@ vi.mock('../../../src/config/configCache.js', () => ({ })); import { loadConfig } from '../../../src/config/provider.js'; -import { - _resetProjectConfigCache, - loadProjectConfig, - routerConfig, -} from '../../../src/router/config.js'; +import { routerConfig } from '../../../src/router/config.js'; const mockLoadConfig = vi.mocked(loadConfig); diff --git a/tests/unit/router/platformClients.test.ts b/tests/unit/router/platformClients.test.ts index 0291ccf7..c2612b95 100644 --- a/tests/unit/router/platformClients.test.ts +++ b/tests/unit/router/platformClients.test.ts @@ -31,10 +31,10 @@ vi.mock('../../../src/utils/logging.js', () => ({ import { findProjectById, getIntegrationCredential } from 
'../../../src/config/provider.js'; import { - TrelloPlatformClient, resolveGitHubHeaders, resolveJiraCredentials, resolveTrelloCredentials, + TrelloPlatformClient, } from '../../../src/router/platformClients/index.js'; import { logger } from '../../../src/utils/logging.js'; diff --git a/tests/unit/router/snapshot-manager.test.ts b/tests/unit/router/snapshot-manager.test.ts index 5195aea3..ed334d57 100644 --- a/tests/unit/router/snapshot-manager.test.ts +++ b/tests/unit/router/snapshot-manager.test.ts @@ -243,7 +243,7 @@ describe('snapshot-manager', () => { it('evicts expired snapshots by TTL', () => { const snap1 = registerSnapshot('proj-1', 'card-1', 'img-1:latest'); - const snap2 = registerSnapshot('proj-1', 'card-2', 'img-2:latest'); + const _snap2 = registerSnapshot('proj-1', 'card-2', 'img-2:latest'); // Backdate snap1 so it's expired snap1.createdAt = new Date(Date.now() - 2000); diff --git a/tests/unit/router/trello.test.ts b/tests/unit/router/trello.test.ts index 01a6335d..1812caab 100644 --- a/tests/unit/router/trello.test.ts +++ b/tests/unit/router/trello.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/utils/logging.js', () => ({ logger: { diff --git a/tests/unit/router/webhook-processor.test.ts b/tests/unit/router/webhook-processor.test.ts index 6ab0e16c..55109284 100644 --- a/tests/unit/router/webhook-processor.test.ts +++ b/tests/unit/router/webhook-processor.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/utils/logging.js', () => ({ logger: { @@ -29,8 +29,8 @@ import { isDuplicateAction, markActionProcessed } from '../../../src/router/acti import { checkAgentTypeConcurrency } from '../../../src/router/agent-type-lock.js'; import type { RouterProjectConfig } from '../../../src/router/config.js'; import type { 
RouterPlatformAdapter } from '../../../src/router/platform-adapter.js'; -import { addJob } from '../../../src/router/queue.js'; import type { CascadeJob } from '../../../src/router/queue.js'; +import { addJob } from '../../../src/router/queue.js'; import { processRouterWebhook } from '../../../src/router/webhook-processor.js'; import { isWorkItemLocked, markWorkItemEnqueued } from '../../../src/router/work-item-lock.js'; import type { TriggerRegistry } from '../../../src/triggers/registry.js'; diff --git a/tests/unit/router/work-item-lock.test.ts b/tests/unit/router/work-item-lock.test.ts index c3ec6450..e45af74f 100644 --- a/tests/unit/router/work-item-lock.test.ts +++ b/tests/unit/router/work-item-lock.test.ts @@ -12,8 +12,6 @@ vi.mock('../../../src/router/config.js', () => ({ import { countActiveRuns } from '../../../src/db/repositories/runsRepository.js'; import { - MAX_SAME_TYPE_PER_WORK_ITEM, - MAX_WORK_ITEM_CONCURRENCY, clearAllWorkItemLocks, clearWorkItemEnqueued, isWorkItemLocked, diff --git a/tests/unit/sentry.test.ts b/tests/unit/sentry.test.ts index a371d538..3f882627 100644 --- a/tests/unit/sentry.test.ts +++ b/tests/unit/sentry.test.ts @@ -24,7 +24,6 @@ describe('sentry wrappers', () => { beforeEach(async () => { vi.resetModules(); - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_DSN; sentry = await import('../../src/sentry.js'); }); @@ -65,7 +64,6 @@ describe('sentry wrappers', () => { }); afterEach(() => { - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.SENTRY_DSN; }); diff --git a/tests/unit/trello/client.test.ts b/tests/unit/trello/client.test.ts index d11d6de1..22c5f8f0 100644 --- a/tests/unit/trello/client.test.ts +++ b/tests/unit/trello/client.test.ts @@ -1,4 +1,4 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, describe, expect, it, vi } from 'vitest'; 
vi.mock('../../../src/utils/logging.js', () => ({ logger: { @@ -41,7 +41,6 @@ vi.mock('trello.js', () => ({ })), })); -import { TrelloClient } from 'trello.js'; import { trelloClient, withTrelloCredentials } from '../../../src/trello/client.js'; describe('trelloClient', () => { @@ -469,7 +468,7 @@ describe('trelloClient', () => { }); it('handles missing fields gracefully', async () => { - const fetchSpy = vi + const _fetchSpy = vi .spyOn(globalThis, 'fetch') .mockResolvedValue(new Response(JSON.stringify([{}, { id: 'b1' }]), { status: 200 })); diff --git a/tests/unit/triggers/agent-execution.test.ts b/tests/unit/triggers/agent-execution.test.ts index 9991c28b..dbefbcf8 100644 --- a/tests/unit/triggers/agent-execution.test.ts +++ b/tests/unit/triggers/agent-execution.test.ts @@ -54,9 +54,9 @@ import { runAgent } from '../../../src/agents/registry.js'; import { getJiraConfig, getTrelloConfig } from '../../../src/pm/config.js'; import { getPMProvider } from '../../../src/pm/context.js'; import { - PMLifecycleManager, createPMProvider, hasAutoLabel, + PMLifecycleManager, resolveProjectPMConfig, } from '../../../src/pm/index.js'; import { runAgentExecutionPipeline } from '../../../src/triggers/shared/agent-execution.js'; diff --git a/tests/unit/triggers/agent-result-handler.test.ts b/tests/unit/triggers/agent-result-handler.test.ts index 63dd0560..83263f88 100644 --- a/tests/unit/triggers/agent-result-handler.test.ts +++ b/tests/unit/triggers/agent-result-handler.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/pm/index.js', () => ({ getPMProvider: vi.fn(), diff --git a/tests/unit/triggers/budget.test.ts b/tests/unit/triggers/budget.test.ts index 92aa37dc..530ea803 100644 --- a/tests/unit/triggers/budget.test.ts +++ b/tests/unit/triggers/budget.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, 
it, vi } from 'vitest'; vi.mock('../../../src/pm/index.js', () => ({ getPMProvider: vi.fn(), diff --git a/tests/unit/triggers/builtins.test.ts b/tests/unit/triggers/builtins.test.ts index 8a200107..3425a031 100644 --- a/tests/unit/triggers/builtins.test.ts +++ b/tests/unit/triggers/builtins.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // Mock all trigger imports vi.mock('../../../src/triggers/github/check-suite-failure.js', () => ({ diff --git a/tests/unit/triggers/check-suite-failure.test.ts b/tests/unit/triggers/check-suite-failure.test.ts index e777b7a8..782e0150 100644 --- a/tests/unit/triggers/check-suite-failure.test.ts +++ b/tests/unit/triggers/check-suite-failure.test.ts @@ -11,6 +11,7 @@ vi.mock('../../../src/triggers/shared/trigger-check.js', () => mockTriggerCheckM vi.mock('../../../src/github/client.js', () => mockGitHubClientModule); +import { githubClient } from '../../../src/github/client.js'; import { CheckSuiteFailureTrigger, resetFixAttempts, @@ -19,11 +20,10 @@ import type { TriggerContext } from '../../../src/triggers/types.js'; import { createCheckSuitePayload, createMockProject } from '../../helpers/factories.js'; import { mockPersonaIdentities } from '../../helpers/mockPersonas.js'; -import { githubClient } from '../../../src/github/client.js'; - vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ lookupWorkItemForPR: vi.fn(), })); + import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; diff --git a/tests/unit/triggers/check-suite-success.test.ts b/tests/unit/triggers/check-suite-success.test.ts index aa36b955..b5390c5f 100644 --- a/tests/unit/triggers/check-suite-success.test.ts +++ b/tests/unit/triggers/check-suite-success.test.ts @@ -11,6 +11,7 @@ vi.mock('../../../src/triggers/shared/trigger-check.js', 
() => mockTriggerCheckM vi.mock('../../../src/github/client.js', () => mockGitHubClientModule); +import { githubClient } from '../../../src/github/client.js'; import { CheckSuiteSuccessTrigger, recentlyDispatched, @@ -21,8 +22,6 @@ import type { TriggerContext } from '../../../src/triggers/types.js'; import { createCheckSuitePayload, createMockProject } from '../../helpers/factories.js'; import { mockPersonaIdentities } from '../../helpers/mockPersonas.js'; -import { githubClient } from '../../../src/github/client.js'; - vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ lookupWorkItemForPR: vi.fn(), })); diff --git a/tests/unit/triggers/debug-runner.test.ts b/tests/unit/triggers/debug-runner.test.ts index a049ea27..44513d8d 100644 --- a/tests/unit/triggers/debug-runner.test.ts +++ b/tests/unit/triggers/debug-runner.test.ts @@ -49,6 +49,7 @@ import { } from '../../../src/triggers/shared/debug-status.js'; const mockPMProvider = { addComment: vi.fn() }; + import type { CascadeConfig } from '../../../src/types/index.js'; import { createMockProject } from '../../helpers/factories.js'; diff --git a/tests/unit/triggers/debug-trigger.test.ts b/tests/unit/triggers/debug-trigger.test.ts index c4018802..38f17c67 100644 --- a/tests/unit/triggers/debug-trigger.test.ts +++ b/tests/unit/triggers/debug-trigger.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/db/repositories/runsRepository.js', () => ({ getRunById: vi.fn(), diff --git a/tests/unit/triggers/github-integration.test.ts b/tests/unit/triggers/github-integration.test.ts index 8deac69f..584c2ce5 100644 --- a/tests/unit/triggers/github-integration.test.ts +++ b/tests/unit/triggers/github-integration.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('../../../src/config/provider.js', () => ({ 
loadProjectConfigByRepo: vi.fn(), diff --git a/tests/unit/triggers/github-pr-comment-mention.test.ts b/tests/unit/triggers/github-pr-comment-mention.test.ts index 9f268b42..02749b94 100644 --- a/tests/unit/triggers/github-pr-comment-mention.test.ts +++ b/tests/unit/triggers/github-pr-comment-mention.test.ts @@ -37,11 +37,7 @@ import { PRCommentMentionTrigger } from '../../../src/triggers/github/pr-comment import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; import type { TriggerContext } from '../../../src/triggers/types.js'; import { createMockProject } from '../../helpers/factories.js'; -import { - IMPLEMENTER_USERNAME, - REVIEWER_USERNAME, - mockPersonaIdentities, -} from '../../helpers/mockPersonas.js'; +import { IMPLEMENTER_USERNAME, mockPersonaIdentities } from '../../helpers/mockPersonas.js'; const HUMAN_USERNAME = 'alice-human'; const CARD_SHORT_ID = 'abc123card'; diff --git a/tests/unit/triggers/pr-merged.test.ts b/tests/unit/triggers/pr-merged.test.ts index 89d2eb67..7b7a62b4 100644 --- a/tests/unit/triggers/pr-merged.test.ts +++ b/tests/unit/triggers/pr-merged.test.ts @@ -54,15 +54,14 @@ vi.mock('../../../src/router/snapshot-manager.js', () => ({ // Register PM integrations in the registry import '../../../src/pm/index.js'; -import { PRMergedTrigger } from '../../../src/triggers/github/pr-merged.js'; -import type { TriggerContext } from '../../../src/triggers/types.js'; -import { createMockProject } from '../../helpers/factories.js'; - import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { githubClient } from '../../../src/github/client.js'; +import { PRMergedTrigger } from '../../../src/triggers/github/pr-merged.js'; import { isPipelineAtCapacity } from '../../../src/triggers/shared/backlog-check.js'; import { isLifecycleTriggerEnabled } from '../../../src/triggers/shared/lifecycle-check.js'; import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; 
+import type { TriggerContext } from '../../../src/triggers/types.js'; +import { createMockProject } from '../../helpers/factories.js'; describe('PRMergedTrigger', () => { const trigger = new PRMergedTrigger(); diff --git a/tests/unit/triggers/pr-opened.test.ts b/tests/unit/triggers/pr-opened.test.ts index ca6f2b82..7e9dcfe0 100644 --- a/tests/unit/triggers/pr-opened.test.ts +++ b/tests/unit/triggers/pr-opened.test.ts @@ -12,6 +12,7 @@ import { createMockProject } from '../../helpers/factories.js'; vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ lookupWorkItemForPR: vi.fn(), })); + import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { checkTriggerEnabledWithParams } from '../../../src/triggers/shared/trigger-check.js'; diff --git a/tests/unit/triggers/pr-ready-to-merge.test.ts b/tests/unit/triggers/pr-ready-to-merge.test.ts index ed973916..dc930042 100644 --- a/tests/unit/triggers/pr-ready-to-merge.test.ts +++ b/tests/unit/triggers/pr-ready-to-merge.test.ts @@ -40,7 +40,10 @@ vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ // Register PM integrations in the registry import '../../../src/pm/index.js'; +import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; +import { githubClient } from '../../../src/github/client.js'; import { PRReadyToMergeTrigger } from '../../../src/triggers/github/pr-ready-to-merge.js'; +import { isLifecycleTriggerEnabled } from '../../../src/triggers/shared/lifecycle-check.js'; import type { TriggerContext } from '../../../src/triggers/types.js'; import { createCheckSuitePayload, @@ -48,10 +51,6 @@ import { createReviewPayload, } from '../../helpers/factories.js'; -import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; -import { githubClient } from '../../../src/github/client.js'; -import { isLifecycleTriggerEnabled } from 
'../../../src/triggers/shared/lifecycle-check.js'; - describe('PRReadyToMergeTrigger', () => { const trigger = new PRReadyToMergeTrigger(); diff --git a/tests/unit/triggers/pr-review-submitted.test.ts b/tests/unit/triggers/pr-review-submitted.test.ts index d9749e64..8cc91c29 100644 --- a/tests/unit/triggers/pr-review-submitted.test.ts +++ b/tests/unit/triggers/pr-review-submitted.test.ts @@ -13,6 +13,7 @@ import { mockPersonaIdentities } from '../../helpers/mockPersonas.js'; vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ lookupWorkItemForPR: vi.fn(), })); + import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; diff --git a/tests/unit/triggers/review-requested.test.ts b/tests/unit/triggers/review-requested.test.ts index f505c517..6998a891 100644 --- a/tests/unit/triggers/review-requested.test.ts +++ b/tests/unit/triggers/review-requested.test.ts @@ -14,6 +14,7 @@ import { mockPersonaIdentities } from '../../helpers/mockPersonas.js'; vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ lookupWorkItemForPR: vi.fn(), })); + import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; diff --git a/tests/unit/triggers/shared/agent-pm-poster.test.ts b/tests/unit/triggers/shared/agent-pm-poster.test.ts index 4bb9f5b9..cf2de8ec 100644 --- a/tests/unit/triggers/shared/agent-pm-poster.test.ts +++ b/tests/unit/triggers/shared/agent-pm-poster.test.ts @@ -32,10 +32,10 @@ vi.mock('../../../../src/utils/logging.js', () => ({ })); import { - PM_SUMMARY_AGENT_TYPES, formatAgentOutputForPM, formatReviewForPM, isOutputBasedAgent, + PM_SUMMARY_AGENT_TYPES, postAgentOutputToPM, postReviewToPM, } from '../../../../src/triggers/shared/agent-pm-poster.js'; diff --git a/tests/unit/utils/llmLogging.test.ts 
b/tests/unit/utils/llmLogging.test.ts index 971e13da..a7ade3b2 100644 --- a/tests/unit/utils/llmLogging.test.ts +++ b/tests/unit/utils/llmLogging.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; vi.mock('node:fs', () => ({ default: { diff --git a/tests/unit/utils/logging.test.ts b/tests/unit/utils/logging.test.ts index 958df256..271245de 100644 --- a/tests/unit/utils/logging.test.ts +++ b/tests/unit/utils/logging.test.ts @@ -1,5 +1,5 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest'; -import { LOG_LEVELS, getLogLevel, logger, setLogLevel } from '../../../src/utils/logging.js'; +import { getLogLevel, LOG_LEVELS, logger, setLogLevel } from '../../../src/utils/logging.js'; /** * Tests for setLogLevel and getLogLevel in utils/logging.ts. diff --git a/tests/unit/utils/safeOperation.test.ts b/tests/unit/utils/safeOperation.test.ts index d9390840..b9f41f10 100644 --- a/tests/unit/utils/safeOperation.test.ts +++ b/tests/unit/utils/safeOperation.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { safeOperation, silentOperation } from '../../../src/utils/safeOperation.js'; vi.mock('../../../src/utils/logging.js', () => ({ diff --git a/tests/unit/utils/webhookLogger.test.ts b/tests/unit/utils/webhookLogger.test.ts index 2e2cb75a..3d5caf4b 100644 --- a/tests/unit/utils/webhookLogger.test.ts +++ b/tests/unit/utils/webhookLogger.test.ts @@ -13,7 +13,7 @@ import { insertWebhookLog, pruneWebhookLogs, } from '../../../src/db/repositories/webhookLogsRepository.js'; -import { type WebhookLogInput, logWebhookCall } from '../../../src/utils/webhookLogger.js'; +import { logWebhookCall, type WebhookLogInput } from '../../../src/utils/webhookLogger.js'; const mockInsertWebhookLog = vi.mocked(insertWebhookLog); const mockPruneWebhookLogs = vi.mocked(pruneWebhookLogs); diff --git 
a/tests/unit/web/pm-wizard-state.test.ts b/tests/unit/web/pm-wizard-state.test.ts index d8710074..c7ff9fce 100644 --- a/tests/unit/web/pm-wizard-state.test.ts +++ b/tests/unit/web/pm-wizard-state.test.ts @@ -1,20 +1,19 @@ import { describe, expect, it } from 'vitest'; - +import type { + WizardAction, + WizardState, +} from '../../../web/src/components/projects/pm-wizard-state.js'; import { - INITIAL_JIRA_LABELS, areCredentialsReady, buildEditState, createInitialState, + INITIAL_JIRA_LABELS, isStep1Complete, isStep2Complete, isStep3Complete, isStep4Complete, wizardReducer, } from '../../../web/src/components/projects/pm-wizard-state.js'; -import type { - WizardAction, - WizardState, -} from '../../../web/src/components/projects/pm-wizard-state.js'; // ============================================================================ // createInitialState diff --git a/tests/unit/web/project-navigation.test.ts b/tests/unit/web/project-navigation.test.ts index b54c1e46..b4e28e5c 100644 --- a/tests/unit/web/project-navigation.test.ts +++ b/tests/unit/web/project-navigation.test.ts @@ -1,9 +1,9 @@ import { describe, expect, it } from 'vitest'; import { DEFAULT_PROJECT_SECTION, - PROJECT_SECTIONS, isProjectActive, isSectionActive, + PROJECT_SECTIONS, resolveDefaultProjectPath, } from '../../../web/src/lib/project-sections.js'; diff --git a/tests/unit/web/triggerAgentMapping.test.ts b/tests/unit/web/triggerAgentMapping.test.ts index 4fff2ba0..b11e6c24 100644 --- a/tests/unit/web/triggerAgentMapping.test.ts +++ b/tests/unit/web/triggerAgentMapping.test.ts @@ -3,8 +3,8 @@ import { AGENT_LABELS, ALL_AGENT_TYPES, CATEGORY_LABELS, - LIFECYCLE_TRIGGERS, getTriggerValue, + LIFECYCLE_TRIGGERS, setTriggerValue, } from '../../../web/src/lib/trigger-agent-mapping.js'; diff --git a/tests/unit/webhook/webhookHandlers.test.ts b/tests/unit/webhook/webhookHandlers.test.ts index b6d18d3c..fcace855 100644 --- a/tests/unit/webhook/webhookHandlers.test.ts +++ 
b/tests/unit/webhook/webhookHandlers.test.ts @@ -1,5 +1,5 @@ import { Hono } from 'hono'; -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // Must mock heavy imports BEFORE importing the module under test vi.mock('../../../src/utils/index.js', () => ({ diff --git a/tests/unit/webhook/webhookLogging.test.ts b/tests/unit/webhook/webhookLogging.test.ts index 368f182e..6e960dbd 100644 --- a/tests/unit/webhook/webhookLogging.test.ts +++ b/tests/unit/webhook/webhookLogging.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; // Must mock heavy imports BEFORE importing the module under test vi.mock('../../../src/utils/webhookLogger.js', () => ({ diff --git a/tests/unit/worker-entry.test.ts b/tests/unit/worker-entry.test.ts index 8d2bc352..13f9072e 100644 --- a/tests/unit/worker-entry.test.ts +++ b/tests/unit/worker-entry.test.ts @@ -91,15 +91,15 @@ import { triggerManualRun, triggerRetryRun } from '../../src/triggers/shared/man import { processTrelloWebhook } from '../../src/triggers/trello/webhook-handler.js'; import { type DebugAnalysisJobData, + dispatchJob, type GitHubJobData, type JiraJobData, type ManualRunJobData, + main, + processDashboardJob, type RetryRunJobData, type SentryJobData, type TrelloJobData, - dispatchJob, - main, - processDashboardJob, } from '../../src/worker-entry.js'; // ── dispatchJob routing tests ───────────────────────────────────────────────── @@ -519,11 +519,8 @@ describe('main() - environment variable validation', () => { afterEach(() => { exitSpy.mockRestore(); - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.JOB_ID; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete process.env.JOB_TYPE; - // biome-ignore lint/performance/noDelete: process.env requires delete to truly unset delete 
process.env.JOB_DATA; }); diff --git a/tools/rotate-credential-key.ts b/tools/rotate-credential-key.ts index 6b41f653..9cc0649c 100644 --- a/tools/rotate-credential-key.ts +++ b/tools/rotate-credential-key.ts @@ -63,7 +63,7 @@ async function main() { .from(projectCredentials); let rotated = 0; - const skipped = 0; + const _skipped = 0; for (const cred of allCreds) { // Decrypt with current key (handles both encrypted and plaintext) diff --git a/tools/seed-config-from-json.ts b/tools/seed-config-from-json.ts index 07f1d26d..b3c95492 100644 --- a/tools/seed-config-from-json.ts +++ b/tools/seed-config-from-json.ts @@ -15,7 +15,7 @@ import { sql } from 'drizzle-orm'; import type { z } from 'zod'; import { type CascadeConfigSchema, validateConfig } from '../src/config/schema.js'; import { closeDb, getDb } from '../src/db/client.js'; -import { agentConfigs, projectIntegrations, projects } from '../src/db/schema/index.js'; +import { projectIntegrations, projects } from '../src/db/schema/index.js'; type CascadeConfig = z.infer; type ProjectConfig = CascadeConfig['projects'][number]; diff --git a/web/src/app.tsx b/web/src/app.tsx index efdb5386..b443c3fd 100644 --- a/web/src/app.tsx +++ b/web/src/app.tsx @@ -1,5 +1,5 @@ import { QueryClientProvider } from '@tanstack/react-query'; -import { RouterProvider, createRouter } from '@tanstack/react-router'; +import { createRouter, RouterProvider } from '@tanstack/react-router'; import { ThemeProvider } from 'next-themes'; import { Toaster } from './components/ui/sonner.js'; import { queryClient } from './lib/query-client.js'; diff --git a/web/src/components/debug/debug-analysis.tsx b/web/src/components/debug/debug-analysis.tsx index f2d619e4..89dfcdda 100644 --- a/web/src/components/debug/debug-analysis.tsx +++ b/web/src/components/debug/debug-analysis.tsx @@ -1,8 +1,8 @@ -import { Button } from '@/components/ui/button.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; import { useMutation, useQuery, useQueryClient } from 
'@tanstack/react-query'; import { useEffect, useRef } from 'react'; import ReactMarkdown from 'react-markdown'; +import { Button } from '@/components/ui/button.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; interface DebugAnalysisProps { runId: string; diff --git a/web/src/components/global/organization-form-dialog.tsx b/web/src/components/global/organization-form-dialog.tsx index 66d9a474..51a017c6 100644 --- a/web/src/components/global/organization-form-dialog.tsx +++ b/web/src/components/global/organization-form-dialog.tsx @@ -1,9 +1,9 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useEffect, useState } from 'react'; import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog.js'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { useEffect, useState } from 'react'; function slugify(name: string): string { return name @@ -34,7 +34,9 @@ export function OrganizationFormDialog({ setId(organization.id); setIdManual(true); } else { - resetForm(); + setName(''); + setId(''); + setIdManual(false); } }, [organization]); diff --git a/web/src/components/layout/breadcrumbs.tsx b/web/src/components/layout/breadcrumbs.tsx index 8d714152..a3637182 100644 --- a/web/src/components/layout/breadcrumbs.tsx +++ b/web/src/components/layout/breadcrumbs.tsx @@ -1,8 +1,8 @@ -import { PROJECT_SECTIONS } from '@/lib/project-sections.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { Link, useRouterState } from '@tanstack/react-router'; import { ChevronRight } from 'lucide-react'; +import { PROJECT_SECTIONS } from '@/lib/project-sections.js'; +import { trpc } from '@/lib/trpc.js'; interface Segment { label: string; @@ -127,7 +127,7 @@ export function Breadcrumbs() { {segments.map((segment, 
i) => { const isLast = i === segments.length - 1; return ( - + {i > 0 && } {isLast || !segment.href ? ( { const { bg, text } = getToolStyle(tc.name); return ( + // biome-ignore lint/suspicious/noArrayIndexKey: tool calls have no unique ID; index is stable for this read-only list {tc.name} diff --git a/web/src/components/logs/log-viewer.tsx b/web/src/components/logs/log-viewer.tsx index 008aa2a7..ad9a793c 100644 --- a/web/src/components/logs/log-viewer.tsx +++ b/web/src/components/logs/log-viewer.tsx @@ -1,7 +1,7 @@ -import { trpc } from '@/lib/trpc.js'; -import { cn } from '@/lib/utils.js'; import { useQuery } from '@tanstack/react-query'; import { useState } from 'react'; +import { trpc } from '@/lib/trpc.js'; +import { cn } from '@/lib/utils.js'; interface LogViewerProps { runId: string; diff --git a/web/src/components/projects/agent-prompt-overrides.tsx b/web/src/components/projects/agent-prompt-overrides.tsx index 94770753..b61b57f0 100644 --- a/web/src/components/projects/agent-prompt-overrides.tsx +++ b/web/src/components/projects/agent-prompt-overrides.tsx @@ -1,3 +1,5 @@ +import { useMutation, useQuery } from '@tanstack/react-query'; +import { useEffect, useState } from 'react'; import { PromptSectionTab, ValidationStatus, @@ -10,8 +12,6 @@ import { ReferencePanel } from '@/components/settings/prompt-editor.js'; */ import { Badge } from '@/components/ui/badge.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery } from '@tanstack/react-query'; -import { useEffect, useState } from 'react'; interface AgentPromptOverridesProps { projectId: string; @@ -255,7 +255,7 @@ type BadgeType = 'custom' | 'inherited' | 'default'; function getInheritanceBadge({ projectOverride, globalPrompt, - defaultPrompt, + defaultPrompt: _defaultPrompt, }: { projectOverride: string | null; globalPrompt: string | null; diff --git a/web/src/components/projects/integration-form.tsx b/web/src/components/projects/integration-form.tsx index 
8e520258..cc69f51e 100644 --- a/web/src/components/projects/integration-form.tsx +++ b/web/src/components/projects/integration-form.tsx @@ -1,7 +1,3 @@ -import { Input } from '@/components/ui/input.js'; -import { Label } from '@/components/ui/label.js'; -import { API_URL } from '@/lib/api.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; import { AlertCircle, @@ -15,6 +11,10 @@ import { Trash2, } from 'lucide-react'; import { useEffect, useState } from 'react'; +import { Input } from '@/components/ui/input.js'; +import { Label } from '@/components/ui/label.js'; +import { API_URL } from '@/lib/api.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; import { PMWizard } from './pm-wizard.js'; import { ProjectSecretField } from './project-secret-field.js'; @@ -308,13 +308,7 @@ interface SCMTabProject { branchPrefix?: string | null; } -function SCMTab({ - projectId, - project, -}: { - projectId: string; - project?: SCMTabProject; -}) { +function SCMTab({ projectId, project }: { projectId: string; project?: SCMTabProject }) { const queryClient = useQueryClient(); // Project-level SCM fields diff --git a/web/src/components/projects/pm-wizard-common-steps.tsx b/web/src/components/projects/pm-wizard-common-steps.tsx index 330ff73a..2c97c272 100644 --- a/web/src/components/projects/pm-wizard-common-steps.tsx +++ b/web/src/components/projects/pm-wizard-common-steps.tsx @@ -2,7 +2,7 @@ * Provider-agnostic step renderer components for PMWizard: * WebhookStep and SaveStep. 
*/ -import { Label } from '@/components/ui/label.js'; + import type { UseMutationResult } from '@tanstack/react-query'; import { AlertCircle, @@ -16,6 +16,7 @@ import { Trash2, } from 'lucide-react'; import { useState } from 'react'; +import { Label } from '@/components/ui/label.js'; import type { WizardState } from './pm-wizard-state.js'; // ============================================================================ diff --git a/web/src/components/projects/pm-wizard-hooks.ts b/web/src/components/projects/pm-wizard-hooks.ts index 45b138b9..591ea0f8 100644 --- a/web/src/components/projects/pm-wizard-hooks.ts +++ b/web/src/components/projects/pm-wizard-hooks.ts @@ -2,10 +2,11 @@ * Custom hooks for PM Wizard mutations and side-effects. * Each hook encapsulates one concern to keep the main orchestrator thin. */ -import { API_URL } from '@/lib/api.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; + import { useMutation, useQueryClient } from '@tanstack/react-query'; import { useEffect } from 'react'; +import { API_URL } from '@/lib/api.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; import type { WizardAction, WizardState } from './pm-wizard-state.js'; // ============================================================================ diff --git a/web/src/components/projects/pm-wizard-jira-steps.tsx b/web/src/components/projects/pm-wizard-jira-steps.tsx index cdd5042f..66205219 100644 --- a/web/src/components/projects/pm-wizard-jira-steps.tsx +++ b/web/src/components/projects/pm-wizard-jira-steps.tsx @@ -1,11 +1,12 @@ /** * JIRA-specific step renderer components for PMWizard. 
*/ + +import type { UseMutationResult } from '@tanstack/react-query'; +import { Loader2, Plus } from 'lucide-react'; import { Button } from '@/components/ui/button.js'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; -import type { UseMutationResult } from '@tanstack/react-query'; -import { Loader2, Plus } from 'lucide-react'; import type { WizardAction, WizardState } from './pm-wizard-state.js'; import { FieldMappingRow, SearchableSelect } from './wizard-shared.js'; diff --git a/web/src/components/projects/pm-wizard-trello-steps.tsx b/web/src/components/projects/pm-wizard-trello-steps.tsx index d381f370..22aa072b 100644 --- a/web/src/components/projects/pm-wizard-trello-steps.tsx +++ b/web/src/components/projects/pm-wizard-trello-steps.tsx @@ -1,13 +1,14 @@ /** * Trello-specific step renderer components for PMWizard. */ -import { Button } from '@/components/ui/button.js'; -import { Input } from '@/components/ui/input.js'; -import { Label } from '@/components/ui/label.js'; + import type { UseMutationResult } from '@tanstack/react-query'; import { CheckCircle2, Loader2, Plus } from 'lucide-react'; import { useEffect, useRef, useState } from 'react'; import { toast } from 'sonner'; +import { Button } from '@/components/ui/button.js'; +import { Input } from '@/components/ui/input.js'; +import { Label } from '@/components/ui/label.js'; import type { WizardAction, WizardState } from './pm-wizard-state.js'; import { FieldMappingRow, SearchableSelect } from './wizard-shared.js'; diff --git a/web/src/components/projects/pm-wizard.tsx b/web/src/components/projects/pm-wizard.tsx index 763d3c3e..3fab7918 100644 --- a/web/src/components/projects/pm-wizard.tsx +++ b/web/src/components/projects/pm-wizard.tsx @@ -1,8 +1,8 @@ -import { Label } from '@/components/ui/label.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { CheckCircle, Globe, Loader2, XCircle } from 
'lucide-react'; import { useEffect, useReducer, useRef, useState } from 'react'; +import { Label } from '@/components/ui/label.js'; +import { trpc } from '@/lib/trpc.js'; import { SaveStep, WebhookStep } from './pm-wizard-common-steps.js'; import { useJiraCustomFieldCreation, diff --git a/web/src/components/projects/project-agent-configs.tsx b/web/src/components/projects/project-agent-configs.tsx index abf302bd..be9d3ede 100644 --- a/web/src/components/projects/project-agent-configs.tsx +++ b/web/src/components/projects/project-agent-configs.tsx @@ -1,3 +1,7 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { AlertTriangle, ArrowLeft, ChevronRight, Trash2 } from 'lucide-react'; +import { useEffect, useMemo, useRef, useState } from 'react'; +import { toast } from 'sonner'; import { engineCredentialKeys } from '@/components/projects/engine-secrets.js'; import { EngineSettingsFields } from '@/components/settings/engine-settings-fields.js'; import { ModelField } from '@/components/settings/model-field.js'; @@ -46,10 +50,6 @@ import { type TriggerParameterValue, } from '@/lib/trigger-agent-mapping.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { AlertTriangle, ArrowLeft, ChevronRight, Trash2 } from 'lucide-react'; -import { useEffect, useMemo, useRef, useState } from 'react'; -import { toast } from 'sonner'; import { AgentPromptOverrides } from './agent-prompt-overrides.js'; interface AgentConfig { diff --git a/web/src/components/projects/project-form-dialog.tsx b/web/src/components/projects/project-form-dialog.tsx index 993981fe..fd3b67c4 100644 --- a/web/src/components/projects/project-form-dialog.tsx +++ b/web/src/components/projects/project-form-dialog.tsx @@ -1,9 +1,9 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useState } from 'react'; import { Dialog, DialogContent, DialogHeader, DialogTitle 
} from '@/components/ui/dialog.js'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { useState } from 'react'; function slugify(name: string): string { return name diff --git a/web/src/components/projects/project-general-form.tsx b/web/src/components/projects/project-general-form.tsx index 700d6f09..5ee43b5d 100644 --- a/web/src/components/projects/project-general-form.tsx +++ b/web/src/components/projects/project-general-form.tsx @@ -1,3 +1,8 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { useNavigate } from '@tanstack/react-router'; +import { HelpCircle } from 'lucide-react'; +import { useMemo, useState } from 'react'; +import { toast } from 'sonner'; import { ProjectSecretField } from '@/components/projects/project-secret-field.js'; import { useProjectUpdate } from '@/components/projects/use-project-update.js'; import { OpenRouterModelCombobox } from '@/components/settings/openrouter-model-combobox.js'; @@ -21,11 +26,6 @@ import { TooltipTrigger, } from '@/components/ui/tooltip.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { useNavigate } from '@tanstack/react-router'; -import { HelpCircle } from 'lucide-react'; -import { useMemo, useState } from 'react'; -import { toast } from 'sonner'; interface Project { id: string; diff --git a/web/src/components/projects/project-harness-form.tsx b/web/src/components/projects/project-harness-form.tsx index e6370e0a..b895cc35 100644 --- a/web/src/components/projects/project-harness-form.tsx +++ b/web/src/components/projects/project-harness-form.tsx @@ -1,3 +1,6 @@ +import { useQuery } from '@tanstack/react-query'; +import { HelpCircle } from 'lucide-react'; +import { useState } from 'react'; import { 
ENGINE_SECRETS } from '@/components/projects/engine-secrets.js'; import { ProjectSecretField } from '@/components/projects/project-secret-field.js'; import { useProjectUpdate } from '@/components/projects/use-project-update.js'; @@ -22,9 +25,6 @@ import { TooltipTrigger, } from '@/components/ui/tooltip.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; -import { HelpCircle } from 'lucide-react'; -import { useState } from 'react'; interface Project { id: string; diff --git a/web/src/components/projects/project-lifecycle-automations.tsx b/web/src/components/projects/project-lifecycle-automations.tsx index 7ce5d507..05650674 100644 --- a/web/src/components/projects/project-lifecycle-automations.tsx +++ b/web/src/components/projects/project-lifecycle-automations.tsx @@ -1,8 +1,8 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { useEffect, useMemo, useRef, useState } from 'react'; import { TriggerToggles } from '@/components/shared/trigger-toggles.js'; import { LIFECYCLE_TRIGGERS } from '@/lib/trigger-agent-mapping.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { useEffect, useMemo, useRef, useState } from 'react'; export function ProjectLifecycleAutomations({ projectId }: { projectId: string }) { const queryClient = useQueryClient(); @@ -21,7 +21,10 @@ export function ProjectLifecycleAutomations({ projectId }: { projectId: string } mutationFn: ({ category, triggers, - }: { category: 'pm' | 'scm'; triggers: Record }) => + }: { + category: 'pm' | 'scm'; + triggers: Record; + }) => trpcClient.projects.integrations.updateTriggers.mutate({ projectId, category, diff --git a/web/src/components/projects/project-secret-field.tsx b/web/src/components/projects/project-secret-field.tsx index 8e834ec6..cd1c58f5 100644 --- a/web/src/components/projects/project-secret-field.tsx +++ 
b/web/src/components/projects/project-secret-field.tsx @@ -2,13 +2,14 @@ * Reusable project-scoped secret input field. * Write-only — shows masked metadata when configured, never exposes plaintext. */ + +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { CheckCircle, Loader2, Trash2, XCircle } from 'lucide-react'; +import { useState } from 'react'; import { Badge } from '@/components/ui/badge.js'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { CheckCircle, Loader2, Trash2, XCircle } from 'lucide-react'; -import { useState } from 'react'; export interface ProjectCredentialMeta { envVarKey: string; diff --git a/web/src/components/projects/project-work-table.tsx b/web/src/components/projects/project-work-table.tsx index 8f8bb7d4..8b224367 100644 --- a/web/src/components/projects/project-work-table.tsx +++ b/web/src/components/projects/project-work-table.tsx @@ -1,8 +1,7 @@ -import { agentTypeLabel, getAgentColor } from '@/lib/chart-colors.js'; -import { formatCostSummary } from '@/lib/utils.js'; import { useNavigate } from '@tanstack/react-router'; -import { Link } from '@tanstack/react-router'; import { ClipboardList, ExternalLink, GitPullRequest } from 'lucide-react'; +import { agentTypeLabel, getAgentColor } from '@/lib/chart-colors.js'; +import { formatCostSummary } from '@/lib/utils.js'; import { WorkItemDurationBar } from './work-item-duration-bar.js'; interface WorkItemRun { diff --git a/web/src/components/projects/projects-table.tsx b/web/src/components/projects/projects-table.tsx index eb9576be..cb2f101d 100644 --- a/web/src/components/projects/projects-table.tsx +++ b/web/src/components/projects/projects-table.tsx @@ -1,3 +1,7 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useNavigate } from 
'@tanstack/react-router'; +import { FolderGit2, Trash2 } from 'lucide-react'; +import { useState } from 'react'; import { AlertDialog, AlertDialogAction, @@ -18,10 +22,6 @@ import { TableRow, } from '@/components/ui/table.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { useNavigate } from '@tanstack/react-router'; -import { FolderGit2, Trash2 } from 'lucide-react'; -import { useState } from 'react'; interface Project { id: string; diff --git a/web/src/components/projects/use-project-update.ts b/web/src/components/projects/use-project-update.ts index 9d175003..dfcd324c 100644 --- a/web/src/components/projects/use-project-update.ts +++ b/web/src/components/projects/use-project-update.ts @@ -1,5 +1,5 @@ -import { trpc, trpcClient } from '@/lib/trpc.js'; import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { trpc, trpcClient } from '@/lib/trpc.js'; type ProjectUpdateInput = Parameters[0]; diff --git a/web/src/components/projects/wizard-shared.tsx b/web/src/components/projects/wizard-shared.tsx index 1b2510f1..fae858b3 100644 --- a/web/src/components/projects/wizard-shared.tsx +++ b/web/src/components/projects/wizard-shared.tsx @@ -2,9 +2,10 @@ * Shared wizard UI components used across pm-wizard and email-wizard. * Extracted to eliminate ~250 lines of verbatim duplication. 
*/ -import { Input } from '@/components/ui/input.js'; + import { AlertCircle, Check, ChevronDown, ChevronRight, Loader2, RefreshCw } from 'lucide-react'; import { useState } from 'react'; +import { Input } from '@/components/ui/input.js'; // ============================================================================ // WizardStep Shell diff --git a/web/src/components/runs/cancel-run-button.tsx b/web/src/components/runs/cancel-run-button.tsx index 0bd83c6b..89f5135d 100644 --- a/web/src/components/runs/cancel-run-button.tsx +++ b/web/src/components/runs/cancel-run-button.tsx @@ -1,3 +1,6 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { CheckCircle, Loader2, Square } from 'lucide-react'; +import { useEffect, useState } from 'react'; import { AlertDialog, AlertDialogAction, @@ -10,9 +13,6 @@ import { } from '@/components/ui/alert-dialog.js'; import { Button } from '@/components/ui/button.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { CheckCircle, Loader2, Square } from 'lucide-react'; -import { useEffect, useState } from 'react'; interface CancelRunButtonProps { runId: string; @@ -51,68 +51,64 @@ export function CancelRunButton({ runId, status }: CancelRunButtonProps) { } return ( - <> - - - - {showSuccess && !cancelMutation.isPending && ( - Cancelled - )} - {cancelMutation.isError && !showSuccess && ( - - {cancelMutation.error instanceof Error - ? `Error: ${cancelMutation.error.message}` - : 'Failed'} - + + + + {showSuccess && !cancelMutation.isPending && ( + Cancelled + )} + {cancelMutation.isError && !showSuccess && ( + + {cancelMutation.error instanceof Error + ? `Error: ${cancelMutation.error.message}` + : 'Failed'} + + )} + - - - Cancel Run - - This will terminate the worker container. Are you sure? 
- - - - Cancel - { - cancelMutation.mutate(); - }} - className="bg-destructive text-destructive-foreground hover:bg-destructive/90" - > - Terminate - - - - - + + + Cancel Run + + This will terminate the worker container. Are you sure? + + + + Cancel + { + cancelMutation.mutate(); + }} + className="bg-destructive text-destructive-foreground hover:bg-destructive/90" + > + Terminate + + + + ); } diff --git a/web/src/components/runs/project-work-duration-chart.tsx b/web/src/components/runs/project-work-duration-chart.tsx index b841abac..ee2577ca 100644 --- a/web/src/components/runs/project-work-duration-chart.tsx +++ b/web/src/components/runs/project-work-duration-chart.tsx @@ -1,6 +1,3 @@ -import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card.js'; -import { agentTypeLabel, getAgentColor } from '@/lib/chart-colors.js'; -import { formatDuration } from '@/lib/utils.js'; import { Bar, BarChart, @@ -12,6 +9,9 @@ import { XAxis, YAxis, } from 'recharts'; +import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card.js'; +import { agentTypeLabel, getAgentColor } from '@/lib/chart-colors.js'; +import { formatDuration } from '@/lib/utils.js'; interface AgentTypeBreakdown { agentType: string; diff --git a/web/src/components/runs/retry-run-button.tsx b/web/src/components/runs/retry-run-button.tsx index 36538846..0d9c8b45 100644 --- a/web/src/components/runs/retry-run-button.tsx +++ b/web/src/components/runs/retry-run-button.tsx @@ -1,7 +1,7 @@ -import { Button } from '@/components/ui/button.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; import { useMutation, useQueryClient } from '@tanstack/react-query'; import { RefreshCw } from 'lucide-react'; +import { Button } from '@/components/ui/button.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; interface RetryRunButtonProps { runId: string; diff --git a/web/src/components/runs/run-filters.tsx b/web/src/components/runs/run-filters.tsx index 20a6fe76..82dd92e0 100644 --- 
a/web/src/components/runs/run-filters.tsx +++ b/web/src/components/runs/run-filters.tsx @@ -1,5 +1,5 @@ -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; +import { trpc } from '@/lib/trpc.js'; interface RunFiltersProps { projectId: string; diff --git a/web/src/components/runs/run-summary-card.tsx b/web/src/components/runs/run-summary-card.tsx index becd9c43..27eff33c 100644 --- a/web/src/components/runs/run-summary-card.tsx +++ b/web/src/components/runs/run-summary-card.tsx @@ -1,8 +1,8 @@ -import { useElapsedTime } from '@/lib/useElapsedTime.js'; -import { formatCost, formatDuration } from '@/lib/utils.js'; import { Link } from '@tanstack/react-router'; import { ExternalLink } from 'lucide-react'; import { useState } from 'react'; +import { useElapsedTime } from '@/lib/useElapsedTime.js'; +import { formatCost, formatDuration } from '@/lib/utils.js'; const OUTPUT_COLLAPSE_THRESHOLD = 500; diff --git a/web/src/components/runs/runs-table.tsx b/web/src/components/runs/runs-table.tsx index 8ee46f18..ab03823b 100644 --- a/web/src/components/runs/runs-table.tsx +++ b/web/src/components/runs/runs-table.tsx @@ -1,6 +1,6 @@ -import { formatCost, formatRelativeTime } from '@/lib/utils.js'; import { Link } from '@tanstack/react-router'; import { Activity, ExternalLink } from 'lucide-react'; +import { formatCost, formatRelativeTime } from '@/lib/utils.js'; import { CancelRunButton } from './cancel-run-button.js'; import { LiveDuration } from './live-duration.js'; import { RetryRunButton } from './retry-run-button.js'; diff --git a/web/src/components/runs/trigger-run-dialog.tsx b/web/src/components/runs/trigger-run-dialog.tsx index c69796b6..814c58da 100644 --- a/web/src/components/runs/trigger-run-dialog.tsx +++ b/web/src/components/runs/trigger-run-dialog.tsx @@ -1,3 +1,5 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { useCallback, useState } from 'react'; import { Button } from 
'@/components/ui/button.js'; import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog.js'; import { Input } from '@/components/ui/input.js'; @@ -10,8 +12,6 @@ import { SelectValue, } from '@/components/ui/select.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { useCallback, useState } from 'react'; // Keep in sync with AgentType in src/types/index.ts const agentTypes = [ diff --git a/web/src/components/runs/work-item-cost-chart.tsx b/web/src/components/runs/work-item-cost-chart.tsx index f756a95f..630efe36 100644 --- a/web/src/components/runs/work-item-cost-chart.tsx +++ b/web/src/components/runs/work-item-cost-chart.tsx @@ -1,7 +1,7 @@ +import { Cell, Label, Legend, Pie, PieChart, ResponsiveContainer, Tooltip } from 'recharts'; import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card.js'; import { agentTypeLabel, getAgentColor } from '@/lib/chart-colors.js'; import { formatCostSummary } from '@/lib/utils.js'; -import { Cell, Label, Legend, Pie, PieChart, ResponsiveContainer, Tooltip } from 'recharts'; interface WorkItemRun { id: string; diff --git a/web/src/components/runs/work-item-runs-table.tsx b/web/src/components/runs/work-item-runs-table.tsx index 8c7cc252..9f49baca 100644 --- a/web/src/components/runs/work-item-runs-table.tsx +++ b/web/src/components/runs/work-item-runs-table.tsx @@ -1,5 +1,5 @@ -import { formatCost, formatRelativeTime } from '@/lib/utils.js'; import { Link } from '@tanstack/react-router'; +import { formatCost, formatRelativeTime } from '@/lib/utils.js'; import { CancelRunButton } from './cancel-run-button.js'; import { LiveDuration } from './live-duration.js'; import { RetryRunButton } from './retry-run-button.js'; diff --git a/web/src/components/settings/agent-config-form-dialog.tsx b/web/src/components/settings/agent-config-form-dialog.tsx index 89f9fda1..60f7277b 100644 --- 
a/web/src/components/settings/agent-config-form-dialog.tsx +++ b/web/src/components/settings/agent-config-form-dialog.tsx @@ -1,3 +1,6 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { Link } from '@tanstack/react-router'; +import { useState } from 'react'; import { EngineSettingsFields } from '@/components/settings/engine-settings-fields.js'; import { ModelField } from '@/components/settings/model-field.js'; import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog.js'; @@ -11,9 +14,6 @@ import { SelectValue, } from '@/components/ui/select.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { Link } from '@tanstack/react-router'; -import { useState } from 'react'; import type { AgentConfig } from './agent-configs-table.js'; diff --git a/web/src/components/settings/agent-configs-table.tsx b/web/src/components/settings/agent-configs-table.tsx index 3c4d05f7..d38a6f5e 100644 --- a/web/src/components/settings/agent-configs-table.tsx +++ b/web/src/components/settings/agent-configs-table.tsx @@ -1,3 +1,6 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { Pencil, Trash2 } from 'lucide-react'; +import { useState } from 'react'; import { AlertDialog, AlertDialogAction, @@ -17,9 +20,6 @@ import { TableRow, } from '@/components/ui/table.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { Pencil, Trash2 } from 'lucide-react'; -import { useState } from 'react'; import { AgentConfigFormDialog } from './agent-config-form-dialog.js'; export interface AgentConfig { diff --git a/web/src/components/settings/agent-definition-editor.tsx b/web/src/components/settings/agent-definition-editor.tsx index 27d6e4a6..dd78efac 100644 --- a/web/src/components/settings/agent-definition-editor.tsx +++ 
b/web/src/components/settings/agent-definition-editor.tsx @@ -1,9 +1,9 @@ +import { useQuery } from '@tanstack/react-query'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs.js'; import { Textarea } from '@/components/ui/textarea.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; import { PromptsPanel } from './agent-definition-prompts.js'; import { CapabilitiesSection, diff --git a/web/src/components/settings/agent-definition-prompts.tsx b/web/src/components/settings/agent-definition-prompts.tsx index 6dee0632..0f62a894 100644 --- a/web/src/components/settings/agent-definition-prompts.tsx +++ b/web/src/components/settings/agent-definition-prompts.tsx @@ -3,10 +3,11 @@ * Extracted from agent-definition-editor.tsx — handles all prompt editing * functionality as a self-contained module with its own queries and mutations. 
*/ -import { Badge } from '@/components/ui/badge.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; + import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; import { useEffect, useState } from 'react'; +import { Badge } from '@/components/ui/badge.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; import { ReferencePanel } from './prompt-editor.js'; // ───────────────────────────────────────────────────────────────────────────── diff --git a/web/src/components/settings/agent-definition-sections.tsx b/web/src/components/settings/agent-definition-sections.tsx index 4ed308d6..9ec99675 100644 --- a/web/src/components/settings/agent-definition-sections.tsx +++ b/web/src/components/settings/agent-definition-sections.tsx @@ -17,9 +17,9 @@ import { type AgentDefinition, CAPABILITY_GROUPS, type Capability, + deepSet, type SchemaData, Toggle, - deepSet, } from './agent-definition-shared.js'; // ───────────────────────────────────────────────────────────────────────────── diff --git a/web/src/components/settings/agent-definition-shared.tsx b/web/src/components/settings/agent-definition-shared.tsx index 5793f673..ffa9b6a9 100644 --- a/web/src/components/settings/agent-definition-shared.tsx +++ b/web/src/components/settings/agent-definition-shared.tsx @@ -3,6 +3,9 @@ * Extracted from agent-definition-editor.tsx to serve as the foundational leaf * of the import graph — this file must NOT import from any sibling agent-definition-* file. 
*/ + +import type { inferRouterOutputs } from '@trpc/server'; +import { Info } from 'lucide-react'; import type { AppRouter } from '@/../../src/api/router.js'; import type { KnownTriggerEvent } from '@/../../src/api/routers/_shared/triggerTypes.js'; import { Badge } from '@/components/ui/badge.js'; @@ -12,8 +15,6 @@ import { TooltipProvider, TooltipTrigger, } from '@/components/ui/tooltip.js'; -import type { inferRouterOutputs } from '@trpc/server'; -import { Info } from 'lucide-react'; // ───────────────────────────────────────────────────────────────────────────── // Type aliases diff --git a/web/src/components/settings/agent-definition-table.tsx b/web/src/components/settings/agent-definition-table.tsx index dd6fd776..c80d41b1 100644 --- a/web/src/components/settings/agent-definition-table.tsx +++ b/web/src/components/settings/agent-definition-table.tsx @@ -1,3 +1,6 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import type { inferRouterOutputs } from '@trpc/server'; +import { Pencil, RotateCcw, Trash2 } from 'lucide-react'; import type { AppRouter } from '@/../../src/api/router.js'; import { Badge } from '@/components/ui/badge.js'; import { @@ -9,9 +12,6 @@ import { TableRow, } from '@/components/ui/table.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import type { inferRouterOutputs } from '@trpc/server'; -import { Pencil, RotateCcw, Trash2 } from 'lucide-react'; type RouterOutput = inferRouterOutputs; type DefinitionRow = RouterOutput['agentDefinitions']['list'][number]; diff --git a/web/src/components/settings/model-field.tsx b/web/src/components/settings/model-field.tsx index f6aa2772..5af512dc 100644 --- a/web/src/components/settings/model-field.tsx +++ b/web/src/components/settings/model-field.tsx @@ -1,3 +1,4 @@ +import { useQuery } from '@tanstack/react-query'; import { OpenRouterModelCombobox } from 
'@/components/settings/openrouter-model-combobox.js'; import { Input } from '@/components/ui/input.js'; import { @@ -8,7 +9,6 @@ import { SelectValue, } from '@/components/ui/select.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; interface ModelFieldProps { value: string; diff --git a/web/src/components/settings/openrouter-model-combobox.tsx b/web/src/components/settings/openrouter-model-combobox.tsx index e296ce2d..bc8ee957 100644 --- a/web/src/components/settings/openrouter-model-combobox.tsx +++ b/web/src/components/settings/openrouter-model-combobox.tsx @@ -1,9 +1,9 @@ +import { useQuery } from '@tanstack/react-query'; import type { ComboboxOption } from '@/components/ui/combobox.js'; import { Combobox } from '@/components/ui/combobox.js'; import { Input } from '@/components/ui/input.js'; -import { OPENROUTER_PREFIX, addPrefix, modelDetail, modelGroup } from '@/lib/openrouter-utils.js'; +import { addPrefix, modelDetail, modelGroup, OPENROUTER_PREFIX } from '@/lib/openrouter-utils.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; interface OpenRouterModelComboboxProps { projectId: string; diff --git a/web/src/components/settings/org-form.tsx b/web/src/components/settings/org-form.tsx index ab88cd76..db4e10d8 100644 --- a/web/src/components/settings/org-form.tsx +++ b/web/src/components/settings/org-form.tsx @@ -1,8 +1,8 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { useEffect, useState } from 'react'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { useEffect, useState } from 'react'; export function OrgForm() { const queryClient = useQueryClient(); diff --git a/web/src/components/settings/prompt-editor.tsx 
b/web/src/components/settings/prompt-editor.tsx index aefb0caa..28f953d2 100644 --- a/web/src/components/settings/prompt-editor.tsx +++ b/web/src/components/settings/prompt-editor.tsx @@ -1,8 +1,8 @@ -import { Badge } from '@/components/ui/badge.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; import { ChevronDown, ChevronRight } from 'lucide-react'; import { useEffect, useState } from 'react'; +import { Badge } from '@/components/ui/badge.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; interface PromptEditorProps { target: { name: string }; @@ -13,7 +13,7 @@ export function PromptEditor({ target, onClose }: PromptEditorProps) { return ; } -function PartialEditor({ name, onClose }: { name: string; onClose: () => void }) { +function PartialEditor({ name, onClose: _onClose }: { name: string; onClose: () => void }) { const queryClient = useQueryClient(); const [content, setContent] = useState(''); const [validationStatus, setValidationStatus] = useState(null); diff --git a/web/src/components/settings/useDefinitionEditor.ts b/web/src/components/settings/useDefinitionEditor.ts index f29e8b4e..6c960726 100644 --- a/web/src/components/settings/useDefinitionEditor.ts +++ b/web/src/components/settings/useDefinitionEditor.ts @@ -3,9 +3,10 @@ * agent definition editor. Extracted from agent-definition-editor.tsx to keep * the main component as a thin orchestrator. 
*/ -import { trpc, trpcClient } from '@/lib/trpc.js'; + import { useMutation, useQueryClient } from '@tanstack/react-query'; import { useState } from 'react'; +import { trpc, trpcClient } from '@/lib/trpc.js'; import { type AgentDefinition, type DefinitionRow, diff --git a/web/src/components/settings/user-form-dialog.tsx b/web/src/components/settings/user-form-dialog.tsx index 8632c9c6..cccfddd0 100644 --- a/web/src/components/settings/user-form-dialog.tsx +++ b/web/src/components/settings/user-form-dialog.tsx @@ -1,9 +1,9 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useState } from 'react'; import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog.js'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { useState } from 'react'; interface User { id: string; diff --git a/web/src/components/settings/users-table.tsx b/web/src/components/settings/users-table.tsx index 66097441..629b5b8a 100644 --- a/web/src/components/settings/users-table.tsx +++ b/web/src/components/settings/users-table.tsx @@ -1,3 +1,6 @@ +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { Pencil, Trash2 } from 'lucide-react'; +import { useState } from 'react'; import { AlertDialog, AlertDialogAction, @@ -18,9 +21,6 @@ import { TableRow, } from '@/components/ui/table.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQueryClient } from '@tanstack/react-query'; -import { Pencil, Trash2 } from 'lucide-react'; -import { useState } from 'react'; import { UserFormDialog } from './user-form-dialog.js'; interface User { diff --git a/web/src/components/shared/trigger-toggles.tsx b/web/src/components/shared/trigger-toggles.tsx index 6c9576dc..0323e8bb 100644 --- a/web/src/components/shared/trigger-toggles.tsx 
+++ b/web/src/components/shared/trigger-toggles.tsx @@ -1,5 +1,5 @@ import { Label } from '@/components/ui/label.js'; -import { type TriggerDef, getTriggerValue, setTriggerValue } from '@/lib/trigger-agent-mapping.js'; +import { getTriggerValue, setTriggerValue, type TriggerDef } from '@/lib/trigger-agent-mapping.js'; export type { TriggerDef }; diff --git a/web/src/components/ui/badge.tsx b/web/src/components/ui/badge.tsx index 5967b1ed..06916808 100644 --- a/web/src/components/ui/badge.tsx +++ b/web/src/components/ui/badge.tsx @@ -1,4 +1,4 @@ -import { type VariantProps, cva } from 'class-variance-authority'; +import { cva, type VariantProps } from 'class-variance-authority'; import { Slot } from 'radix-ui'; import type * as React from 'react'; diff --git a/web/src/components/ui/button.tsx b/web/src/components/ui/button.tsx index 469e41b7..c305fa12 100644 --- a/web/src/components/ui/button.tsx +++ b/web/src/components/ui/button.tsx @@ -1,4 +1,4 @@ -import { type VariantProps, cva } from 'class-variance-authority'; +import { cva, type VariantProps } from 'class-variance-authority'; import { Slot } from 'radix-ui'; import type * as React from 'react'; diff --git a/web/src/components/ui/card.tsx b/web/src/components/ui/card.tsx index 273b28d9..7891723c 100644 --- a/web/src/components/ui/card.tsx +++ b/web/src/components/ui/card.tsx @@ -72,4 +72,4 @@ function CardFooter({ className, ...props }: React.ComponentProps<'div'>) { ); } -export { Card, CardHeader, CardFooter, CardTitle, CardAction, CardDescription, CardContent }; +export { Card, CardAction, CardContent, CardDescription, CardFooter, CardHeader, CardTitle }; diff --git a/web/src/components/ui/combobox.tsx b/web/src/components/ui/combobox.tsx index ab8dd6aa..787fccb6 100644 --- a/web/src/components/ui/combobox.tsx +++ b/web/src/components/ui/combobox.tsx @@ -1,9 +1,9 @@ -import { Button } from '@/components/ui/button.js'; -import { cn } from '@/lib/utils.js'; import { Command as CommandPrimitive } from 'cmdk'; 
import { Check, ChevronsUpDown } from 'lucide-react'; import { Popover as PopoverPrimitive } from 'radix-ui'; import * as React from 'react'; +import { Button } from '@/components/ui/button.js'; +import { cn } from '@/lib/utils.js'; export interface ComboboxOption { value: string; diff --git a/web/src/components/ui/form.tsx b/web/src/components/ui/form.tsx index eb0b6c40..9de28481 100644 --- a/web/src/components/ui/form.tsx +++ b/web/src/components/ui/form.tsx @@ -139,12 +139,12 @@ function FormMessage({ className, ...props }: React.ComponentProps<'p'>) { } export { - useFormField, Form, - FormItem, - FormLabel, FormControl, FormDescription, - FormMessage, FormField, + FormItem, + FormLabel, + FormMessage, + useFormField, }; diff --git a/web/src/components/ui/table.tsx b/web/src/components/ui/table.tsx index 53a7a71e..41ada581 100644 --- a/web/src/components/ui/table.tsx +++ b/web/src/components/ui/table.tsx @@ -89,4 +89,4 @@ function TableCaption({ className, ...props }: React.ComponentProps<'caption'>) ); } -export { Table, TableHeader, TableBody, TableFooter, TableHead, TableRow, TableCell, TableCaption }; +export { Table, TableBody, TableCaption, TableCell, TableFooter, TableHead, TableHeader, TableRow }; diff --git a/web/src/components/ui/tabs.tsx b/web/src/components/ui/tabs.tsx index 178dd818..cc369e28 100644 --- a/web/src/components/ui/tabs.tsx +++ b/web/src/components/ui/tabs.tsx @@ -1,6 +1,6 @@ 'use client'; -import { type VariantProps, cva } from 'class-variance-authority'; +import { cva, type VariantProps } from 'class-variance-authority'; import { Tabs as TabsPrimitive } from 'radix-ui'; import type * as React from 'react'; @@ -78,4 +78,4 @@ function TabsContent({ className, ...props }: React.ComponentProps(null); const isAdmin = me?.role === 'superadmin'; const initialized = useRef(false); diff --git a/web/src/lib/trigger-agent-mapping.ts b/web/src/lib/trigger-agent-mapping.ts index d2526d2f..0c7fbe1a 100644 --- a/web/src/lib/trigger-agent-mapping.ts 
+++ b/web/src/lib/trigger-agent-mapping.ts @@ -3,14 +3,14 @@ * Uses definition-based triggers from the API via agentTriggerConfigs.getProjectTriggersView. */ -// Re-export shared types for convenience -export { TRIGGER_CATEGORY_LABELS as CATEGORY_LABELS } from '../../../src/api/routers/_shared/triggerTypes.js'; export type { + ProjectTriggersView, ResolvedTrigger, TriggerParameterDef, TriggerParameterValue, - ProjectTriggersView, } from '../../../src/api/routers/_shared/triggerTypes.js'; +// Re-export shared types for convenience +export { TRIGGER_CATEGORY_LABELS as CATEGORY_LABELS } from '../../../src/api/routers/_shared/triggerTypes.js'; // ============================================================================ // Types diff --git a/web/src/routes/__root.tsx b/web/src/routes/__root.tsx index d3570ad6..be6ca882 100644 --- a/web/src/routes/__root.tsx +++ b/web/src/routes/__root.tsx @@ -1,13 +1,13 @@ +import { useQuery } from '@tanstack/react-query'; +import { createRootRoute, Outlet, redirect, useRouterState } from '@tanstack/react-router'; +import { Menu } from 'lucide-react'; +import { useState } from 'react'; import { Header } from '@/components/layout/header.js'; import { MobileSidebar } from '@/components/layout/mobile-sidebar.js'; import { Sidebar } from '@/components/layout/sidebar.js'; import { OrgProvider } from '@/lib/org-context.js'; import { queryClient } from '@/lib/query-client.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; -import { Outlet, createRootRoute, redirect, useRouterState } from '@tanstack/react-router'; -import { Menu } from 'lucide-react'; -import { useState } from 'react'; function RootLayout() { const routerState = useRouterState(); diff --git a/web/src/routes/global/definitions.tsx b/web/src/routes/global/definitions.tsx index 4ceb8b25..c0f48198 100644 --- a/web/src/routes/global/definitions.tsx +++ b/web/src/routes/global/definitions.tsx @@ -1,6 +1,10 @@ +import { useMutation, 
useQuery, useQueryClient } from '@tanstack/react-query'; +import { createRoute } from '@tanstack/react-router'; +import { ArrowLeft, Pencil, Trash2 } from 'lucide-react'; +import { useState } from 'react'; import { AgentDefinitionEditor } from '@/components/settings/agent-definition-editor.js'; -import { AgentDefinitionsTable } from '@/components/settings/agent-definition-table.js'; import type { DefinitionRow } from '@/components/settings/agent-definition-table.js'; +import { AgentDefinitionsTable } from '@/components/settings/agent-definition-table.js'; import { PromptEditor } from '@/components/settings/prompt-editor.js'; import { Badge } from '@/components/ui/badge.js'; import { @@ -12,10 +16,6 @@ import { TableRow, } from '@/components/ui/table.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { createRoute } from '@tanstack/react-router'; -import { ArrowLeft, Pencil, Trash2 } from 'lucide-react'; -import { useState } from 'react'; import { rootRoute } from '../__root.js'; type Tab = 'definitions' | 'partials'; diff --git a/web/src/routes/global/organizations.tsx b/web/src/routes/global/organizations.tsx index 9b426280..0467d03f 100644 --- a/web/src/routes/global/organizations.tsx +++ b/web/src/routes/global/organizations.tsx @@ -1,10 +1,10 @@ -import { OrganizationFormDialog } from '@/components/global/organization-form-dialog.js'; -import { OrganizationsTable } from '@/components/global/organizations-table.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute } from '@tanstack/react-router'; import { Plus } from 'lucide-react'; import { useState } from 'react'; +import { OrganizationFormDialog } from '@/components/global/organization-form-dialog.js'; +import { OrganizationsTable } from '@/components/global/organizations-table.js'; +import { trpc } from '@/lib/trpc.js'; import { rootRoute } from 
'../__root.js'; interface Organization { diff --git a/web/src/routes/global/runs.tsx b/web/src/routes/global/runs.tsx index 0c182844..b23f9510 100644 --- a/web/src/routes/global/runs.tsx +++ b/web/src/routes/global/runs.tsx @@ -1,9 +1,9 @@ -import { RunFilters } from '@/components/runs/run-filters.js'; -import { RunsTable } from '@/components/runs/runs-table.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute, useNavigate, useSearch } from '@tanstack/react-router'; import { z } from 'zod'; +import { RunFilters } from '@/components/runs/run-filters.js'; +import { RunsTable } from '@/components/runs/runs-table.js'; +import { trpc } from '@/lib/trpc.js'; import { rootRoute } from '../__root.js'; const searchSchema = z.object({ diff --git a/web/src/routes/global/webhook-logs.tsx b/web/src/routes/global/webhook-logs.tsx index 6ac5437f..61b6c71f 100644 --- a/web/src/routes/global/webhook-logs.tsx +++ b/web/src/routes/global/webhook-logs.tsx @@ -1,10 +1,10 @@ -import { WebhookLogDetailDialog } from '@/components/webhooklogs/webhooklog-detail-dialog.js'; -import { WebhookLogsTable } from '@/components/webhooklogs/webhooklogs-table.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute, useNavigate, useSearch } from '@tanstack/react-router'; import { useState } from 'react'; import { z } from 'zod'; +import { WebhookLogDetailDialog } from '@/components/webhooklogs/webhooklog-detail-dialog.js'; +import { WebhookLogsTable } from '@/components/webhooklogs/webhooklogs-table.js'; +import { trpc } from '@/lib/trpc.js'; import { rootRoute } from '../__root.js'; const searchSchema = z.object({ diff --git a/web/src/routes/index.tsx b/web/src/routes/index.tsx index 6e05afa2..d35f19ed 100644 --- a/web/src/routes/index.tsx +++ b/web/src/routes/index.tsx @@ -1,13 +1,13 @@ -import { RunFilters } from '@/components/runs/run-filters.js'; -import { RunsTable } from 
'@/components/runs/runs-table.js'; -import { TriggerRunDialog } from '@/components/runs/trigger-run-dialog.js'; -import { Button } from '@/components/ui/button.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute, useNavigate, useSearch } from '@tanstack/react-router'; import { Play } from 'lucide-react'; import { useState } from 'react'; import { z } from 'zod'; +import { RunFilters } from '@/components/runs/run-filters.js'; +import { RunsTable } from '@/components/runs/runs-table.js'; +import { TriggerRunDialog } from '@/components/runs/trigger-run-dialog.js'; +import { Button } from '@/components/ui/button.js'; +import { trpc } from '@/lib/trpc.js'; import { rootRoute } from './__root.js'; const searchSchema = z.object({ diff --git a/web/src/routes/login.tsx b/web/src/routes/login.tsx index 83a12345..a8cc608a 100644 --- a/web/src/routes/login.tsx +++ b/web/src/routes/login.tsx @@ -1,7 +1,7 @@ -import { trpc } from '@/lib/trpc.js'; import { useQueryClient } from '@tanstack/react-query'; import { createRoute, useNavigate } from '@tanstack/react-router'; import { useState } from 'react'; +import { trpc } from '@/lib/trpc.js'; import { API_URL } from '../lib/api.js'; import { rootRoute } from './__root.js'; diff --git a/web/src/routes/projects/$projectId.agent-configs.tsx b/web/src/routes/projects/$projectId.agent-configs.tsx index 33555a60..a50c27f0 100644 --- a/web/src/routes/projects/$projectId.agent-configs.tsx +++ b/web/src/routes/projects/$projectId.agent-configs.tsx @@ -1,5 +1,5 @@ -import { ProjectAgentConfigs } from '@/components/projects/project-agent-configs.js'; import { createRoute } from '@tanstack/react-router'; +import { ProjectAgentConfigs } from '@/components/projects/project-agent-configs.js'; import { projectDetailRoute } from './$projectId.js'; function ProjectAgentConfigsPage() { diff --git a/web/src/routes/projects/$projectId.general.tsx 
b/web/src/routes/projects/$projectId.general.tsx index eb95efae..36326ee9 100644 --- a/web/src/routes/projects/$projectId.general.tsx +++ b/web/src/routes/projects/$projectId.general.tsx @@ -1,7 +1,7 @@ -import { ProjectGeneralForm } from '@/components/projects/project-general-form.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute } from '@tanstack/react-router'; +import { ProjectGeneralForm } from '@/components/projects/project-general-form.js'; +import { trpc } from '@/lib/trpc.js'; import { projectDetailRoute } from './$projectId.js'; function ProjectGeneralPage() { diff --git a/web/src/routes/projects/$projectId.harness.tsx b/web/src/routes/projects/$projectId.harness.tsx index 9a86e4eb..09af7158 100644 --- a/web/src/routes/projects/$projectId.harness.tsx +++ b/web/src/routes/projects/$projectId.harness.tsx @@ -1,7 +1,7 @@ -import { ProjectHarnessForm } from '@/components/projects/project-harness-form.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute } from '@tanstack/react-router'; +import { ProjectHarnessForm } from '@/components/projects/project-harness-form.js'; +import { trpc } from '@/lib/trpc.js'; import { projectDetailRoute } from './$projectId.js'; function ProjectHarnessPage() { diff --git a/web/src/routes/projects/$projectId.integrations.tsx b/web/src/routes/projects/$projectId.integrations.tsx index 91f82611..d425dfbb 100644 --- a/web/src/routes/projects/$projectId.integrations.tsx +++ b/web/src/routes/projects/$projectId.integrations.tsx @@ -1,5 +1,5 @@ -import { IntegrationForm } from '@/components/projects/integration-form.js'; import { createRoute } from '@tanstack/react-router'; +import { IntegrationForm } from '@/components/projects/integration-form.js'; import { projectDetailRoute } from './$projectId.js'; function ProjectIntegrationsPage() { diff --git a/web/src/routes/projects/$projectId.lifecycle.tsx 
b/web/src/routes/projects/$projectId.lifecycle.tsx index a7246039..54288ea0 100644 --- a/web/src/routes/projects/$projectId.lifecycle.tsx +++ b/web/src/routes/projects/$projectId.lifecycle.tsx @@ -1,5 +1,5 @@ -import { ProjectLifecycleAutomations } from '@/components/projects/project-lifecycle-automations.js'; import { createRoute } from '@tanstack/react-router'; +import { ProjectLifecycleAutomations } from '@/components/projects/project-lifecycle-automations.js'; import { projectDetailRoute } from './$projectId.js'; function ProjectLifecyclePage() { diff --git a/web/src/routes/projects/$projectId.stats.tsx b/web/src/routes/projects/$projectId.stats.tsx index 88bec42b..f4a603fe 100644 --- a/web/src/routes/projects/$projectId.stats.tsx +++ b/web/src/routes/projects/$projectId.stats.tsx @@ -1,12 +1,12 @@ -import { StatsFiltersBar } from '@/components/projects/stats-filters.js'; +import { useQuery } from '@tanstack/react-query'; +import { createRoute } from '@tanstack/react-router'; +import { useMemo, useState } from 'react'; import type { StatsFilters } from '@/components/projects/stats-filters.js'; +import { StatsFiltersBar } from '@/components/projects/stats-filters.js'; import { StatsSummary } from '@/components/projects/stats-summary.js'; import { ProjectWorkDurationChart } from '@/components/runs/project-work-duration-chart.js'; import { WorkItemCostChart } from '@/components/runs/work-item-cost-chart.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; -import { createRoute } from '@tanstack/react-router'; -import { useMemo, useState } from 'react'; import { projectDetailRoute } from './$projectId.js'; export function computeDateFrom(timeRange: string): string | undefined { diff --git a/web/src/routes/projects/$projectId.tsx b/web/src/routes/projects/$projectId.tsx index 6eb0ceb4..400b19b6 100644 --- a/web/src/routes/projects/$projectId.tsx +++ b/web/src/routes/projects/$projectId.tsx @@ -1,4 +1,4 @@ -import { Outlet, 
createRoute, redirect } from '@tanstack/react-router'; +import { createRoute, Outlet, redirect } from '@tanstack/react-router'; import { rootRoute } from '../__root.js'; function ProjectShellPage() { diff --git a/web/src/routes/projects/$projectId.work.tsx b/web/src/routes/projects/$projectId.work.tsx index 6836aacd..8e8c5122 100644 --- a/web/src/routes/projects/$projectId.work.tsx +++ b/web/src/routes/projects/$projectId.work.tsx @@ -1,8 +1,8 @@ -import { ProjectWorkTable } from '@/components/projects/project-work-table.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute } from '@tanstack/react-router'; import { useState } from 'react'; +import { ProjectWorkTable } from '@/components/projects/project-work-table.js'; +import { trpc } from '@/lib/trpc.js'; import { projectDetailRoute } from './$projectId.js'; const WORK_PAGE_SIZE = 50; diff --git a/web/src/routes/projects/index.tsx b/web/src/routes/projects/index.tsx index b10e9bb5..b7753716 100644 --- a/web/src/routes/projects/index.tsx +++ b/web/src/routes/projects/index.tsx @@ -1,9 +1,9 @@ -import { ProjectFormDialog } from '@/components/projects/project-form-dialog.js'; -import { ProjectsTable } from '@/components/projects/projects-table.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute } from '@tanstack/react-router'; import { useState } from 'react'; +import { ProjectFormDialog } from '@/components/projects/project-form-dialog.js'; +import { ProjectsTable } from '@/components/projects/projects-table.js'; +import { trpc } from '@/lib/trpc.js'; import { rootRoute } from '../__root.js'; function ProjectsListPage() { diff --git a/web/src/routes/prs/$projectId.$prNumber.tsx b/web/src/routes/prs/$projectId.$prNumber.tsx index f6bd76c6..d32b327d 100644 --- a/web/src/routes/prs/$projectId.$prNumber.tsx +++ b/web/src/routes/prs/$projectId.$prNumber.tsx @@ -1,10 +1,10 @@ +import { useQuery } 
from '@tanstack/react-query'; +import { createRoute } from '@tanstack/react-router'; +import { ExternalLink } from 'lucide-react'; import { WorkItemCostChart } from '@/components/runs/work-item-cost-chart.js'; import { WorkItemDurationChart } from '@/components/runs/work-item-duration-chart.js'; import { WorkItemRunsTable } from '@/components/runs/work-item-runs-table.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; -import { createRoute } from '@tanstack/react-router'; -import { ExternalLink } from 'lucide-react'; import { rootRoute } from '../__root.js'; function PRRunsPage() { diff --git a/web/src/routes/runs/$runId.tsx b/web/src/routes/runs/$runId.tsx index 338b7ed5..b3790aaa 100644 --- a/web/src/routes/runs/$runId.tsx +++ b/web/src/routes/runs/$runId.tsx @@ -1,3 +1,7 @@ +import { useQuery } from '@tanstack/react-query'; +import { createRoute, Link } from '@tanstack/react-router'; +import { FileText, GitPullRequest } from 'lucide-react'; +import { useState } from 'react'; import { DebugAnalysis } from '@/components/debug/debug-analysis.js'; import { LlmCallList } from '@/components/llm-calls/llm-call-list.js'; import { LogViewer } from '@/components/logs/log-viewer.js'; @@ -7,10 +11,6 @@ import { RunStatusBadge } from '@/components/runs/run-status-badge.js'; import { RunSummaryCard } from '@/components/runs/run-summary-card.js'; import { trpc } from '@/lib/trpc.js'; import { cn } from '@/lib/utils.js'; -import { useQuery } from '@tanstack/react-query'; -import { Link, createRoute } from '@tanstack/react-router'; -import { FileText, GitPullRequest } from 'lucide-react'; -import { useState } from 'react'; import { rootRoute } from '../__root.js'; type Tab = 'overview' | 'logs' | 'llm-calls' | 'debug'; diff --git a/web/src/routes/settings/general.tsx b/web/src/routes/settings/general.tsx index 5defa8d1..c575244c 100644 --- a/web/src/routes/settings/general.tsx +++ b/web/src/routes/settings/general.tsx @@ -1,5 +1,5 @@ 
-import { OrgForm } from '@/components/settings/org-form.js'; import { createRoute } from '@tanstack/react-router'; +import { OrgForm } from '@/components/settings/org-form.js'; import { rootRoute } from '../__root.js'; function GeneralSettingsPage() { diff --git a/web/src/routes/settings/users.tsx b/web/src/routes/settings/users.tsx index ec2f741a..88a61021 100644 --- a/web/src/routes/settings/users.tsx +++ b/web/src/routes/settings/users.tsx @@ -1,9 +1,9 @@ -import { UserFormDialog } from '@/components/settings/user-form-dialog.js'; -import { UsersTable } from '@/components/settings/users-table.js'; -import { trpc } from '@/lib/trpc.js'; import { useQuery } from '@tanstack/react-query'; import { createRoute } from '@tanstack/react-router'; import { useState } from 'react'; +import { UserFormDialog } from '@/components/settings/user-form-dialog.js'; +import { UsersTable } from '@/components/settings/users-table.js'; +import { trpc } from '@/lib/trpc.js'; import { rootRoute } from '../__root.js'; function UsersPage() { diff --git a/web/src/routes/work-items/$projectId.$workItemId.tsx b/web/src/routes/work-items/$projectId.$workItemId.tsx index 8ce121e3..9b1d487f 100644 --- a/web/src/routes/work-items/$projectId.$workItemId.tsx +++ b/web/src/routes/work-items/$projectId.$workItemId.tsx @@ -1,10 +1,10 @@ +import { useQuery } from '@tanstack/react-query'; +import { createRoute } from '@tanstack/react-router'; +import { ExternalLink } from 'lucide-react'; import { WorkItemCostChart } from '@/components/runs/work-item-cost-chart.js'; import { WorkItemDurationChart } from '@/components/runs/work-item-duration-chart.js'; import { WorkItemRunsTable } from '@/components/runs/work-item-runs-table.js'; import { trpc } from '@/lib/trpc.js'; -import { useQuery } from '@tanstack/react-query'; -import { createRoute } from '@tanstack/react-router'; -import { ExternalLink } from 'lucide-react'; import { rootRoute } from '../__root.js'; function WorkItemRunsPage() { From 
4949ad4441fb1727c1e8bb9275afb02d22b058ce Mon Sep 17 00:00:00 2001 From: aaight Date: Thu, 2 Apr 2026 13:59:11 +0200 Subject: [PATCH 08/52] chore(deps): batch minor dependency updates across root and web packages (#1076) Co-authored-by: Cascade Bot --- package-lock.json | 2281 +++++++++++++++++++++++++++++------------ web/package-lock.json | 429 ++++---- web/package.json | 2 +- 3 files changed, 1835 insertions(+), 877 deletions(-) diff --git a/package-lock.json b/package-lock.json index 77a5c967..bae9da68 100644 --- a/package-lock.json +++ b/package-lock.json @@ -95,27 +95,52 @@ } }, "node_modules/@anthropic-ai/claude-agent-sdk": { - "version": "0.2.42", - "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-agent-sdk/-/claude-agent-sdk-0.2.42.tgz", - "integrity": "sha512-/CugP7AjP57Dqtl2sbsDtxdbpQoPKIhjyF5WrTViGu4NHQdM+UikrRs4MhZ2jeotiC5R7iK9ZUN9SiBgcZ8oLw==", + "version": "0.2.90", + "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-agent-sdk/-/claude-agent-sdk-0.2.90.tgz", + "integrity": "sha512-up5bK0pUbthKIZtNE18WDrIYi0KNpZUhdgjGbkfH/mFQJxI6W/uE3mTiLrCX3UF0SqNl0fMtojBTZPJr2b3O4g==", "license": "SEE LICENSE IN README.md", + "dependencies": { + "@anthropic-ai/sdk": "^0.74.0", + "@modelcontextprotocol/sdk": "^1.27.1" + }, "engines": { "node": ">=18.0.0" }, "optionalDependencies": { - "@img/sharp-darwin-arm64": "^0.33.5", - "@img/sharp-darwin-x64": "^0.33.5", - "@img/sharp-linux-arm": "^0.33.5", - "@img/sharp-linux-arm64": "^0.33.5", - "@img/sharp-linux-x64": "^0.33.5", - "@img/sharp-linuxmusl-arm64": "^0.33.5", - "@img/sharp-linuxmusl-x64": "^0.33.5", - "@img/sharp-win32-x64": "^0.33.5" + "@img/sharp-darwin-arm64": "^0.34.2", + "@img/sharp-darwin-x64": "^0.34.2", + "@img/sharp-linux-arm": "^0.34.2", + "@img/sharp-linux-arm64": "^0.34.2", + "@img/sharp-linux-x64": "^0.34.2", + "@img/sharp-linuxmusl-arm64": "^0.34.2", + "@img/sharp-linuxmusl-x64": "^0.34.2", + "@img/sharp-win32-arm64": "^0.34.2", + "@img/sharp-win32-x64": "^0.34.2" }, 
"peerDependencies": { "zod": "^4.0.0" } }, + "node_modules/@anthropic-ai/claude-agent-sdk/node_modules/@anthropic-ai/sdk": { + "version": "0.74.0", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.74.0.tgz", + "integrity": "sha512-srbJV7JKsc5cQ6eVuFzjZO7UR3xEPJqPamHFIe29bs38Ij2IripoAhC0S5NslNbaFUYqBKypmmpzMTpqfHEUDw==", + "license": "MIT", + "dependencies": { + "json-schema-to-ts": "^3.1.1" + }, + "bin": { + "anthropic-ai-sdk": "bin/cli" + }, + "peerDependencies": { + "zod": "^3.25.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, "node_modules/@anthropic-ai/sdk": { "version": "0.69.0", "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.69.0.tgz", @@ -136,23 +161,6 @@ } } }, - "node_modules/@apm-js-collab/code-transformer": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/@apm-js-collab/code-transformer/-/code-transformer-0.8.2.tgz", - "integrity": "sha512-YRjJjNq5KFSjDUoqu5pFUWrrsvGOxl6c3bu+uMFc9HNNptZ2rNU/TI2nLw4jnhQNtka972Ee2m3uqbvDQtPeCA==", - "license": "Apache-2.0" - }, - "node_modules/@apm-js-collab/tracing-hooks": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@apm-js-collab/tracing-hooks/-/tracing-hooks-0.3.1.tgz", - "integrity": "sha512-Vu1CbmPURlN5fTboVuKMoJjbO5qcq9fA5YXpskx3dXe/zTBvjODFoerw+69rVBlRLrJpwPqSDqEuJDEKIrTldw==", - "license": "Apache-2.0", - "dependencies": { - "@apm-js-collab/code-transformer": "^0.8.0", - "debug": "^4.4.1", - "module-details-from-path": "^1.0.4" - } - }, "node_modules/@asamuzakjp/css-color": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.2.tgz", @@ -502,15 +510,17 @@ } }, "node_modules/@commitlint/cli": { - "version": "20.3.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-20.5.0.tgz", + "integrity": "sha512-yNkyN/tuKTJS3wdVfsZ2tXDM4G4Gi7z+jW54Cki8N8tZqwKBltbIvUUrSbT4hz1bhW/h0CdR+5sCSpXD+wMKaQ==", "dev": true, "license": 
"MIT", "dependencies": { - "@commitlint/format": "^20.2.0", - "@commitlint/lint": "^20.3.0", - "@commitlint/load": "^20.3.0", - "@commitlint/read": "^20.2.0", - "@commitlint/types": "^20.2.0", + "@commitlint/format": "^20.5.0", + "@commitlint/lint": "^20.5.0", + "@commitlint/load": "^20.5.0", + "@commitlint/read": "^20.5.0", + "@commitlint/types": "^20.5.0", "tinyexec": "^1.0.0", "yargs": "^17.0.0" }, @@ -568,23 +578,27 @@ } }, "node_modules/@commitlint/config-conventional": { - "version": "20.3.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-20.5.0.tgz", + "integrity": "sha512-t3Ni88rFw1XMa4nZHgOKJ8fIAT9M2j5TnKyTqJzsxea7FUetlNdYFus9dz+MhIRZmc16P0PPyEfh6X2d/qw8SA==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^20.2.0", - "conventional-changelog-conventionalcommits": "^7.0.2" + "@commitlint/types": "^20.5.0", + "conventional-changelog-conventionalcommits": "^9.2.0" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/config-validator": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-20.5.0.tgz", + "integrity": "sha512-T/Uh6iJUzyx7j35GmHWdIiGRQB+ouZDk0pwAaYq4SXgB54KZhFdJ0vYmxiW6AMYICTIWuyMxDBl1jK74oFp/Gw==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^20.2.0", + "@commitlint/types": "^20.5.0", "ajv": "^8.11.0" }, "engines": { @@ -592,11 +606,13 @@ } }, "node_modules/@commitlint/ensure": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-20.5.0.tgz", + "integrity": "sha512-IpHqAUesBeW1EDDdjzJeaOxU9tnogLAyXLRBn03SHlj1SGENn2JGZqSWGkFvBJkJzfXAuCNtsoYzax+ZPS+puw==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^20.2.0", + "@commitlint/types": "^20.5.0", "lodash.camelcase": "^4.3.0", "lodash.kebabcase": "^4.1.1", "lodash.snakecase": "^4.1.1", @@ 
-609,6 +625,8 @@ }, "node_modules/@commitlint/execute-rule": { "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-20.0.0.tgz", + "integrity": "sha512-xyCoOShoPuPL44gVa+5EdZsBVao/pNzpQhkzq3RdtlFdKZtjWcLlUFQHSWBuhk5utKYykeJPSz2i8ABHQA+ZZw==", "dev": true, "license": "MIT", "engines": { @@ -616,23 +634,27 @@ } }, "node_modules/@commitlint/format": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-20.5.0.tgz", + "integrity": "sha512-TI9EwFU/qZWSK7a5qyXMpKPPv3qta7FO4tKW+Wt2al7sgMbLWTsAcDpX1cU8k16TRdsiiet9aOw0zpvRXNJu7Q==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^20.2.0", - "chalk": "^5.3.0" + "@commitlint/types": "^20.5.0", + "picocolors": "^1.1.1" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/is-ignored": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-20.5.0.tgz", + "integrity": "sha512-JWLarAsurHJhPozbuAH6GbP4p/hdOCoqS9zJMfqwswne+/GPs5V0+rrsfOkP68Y8PSLphwtFXV0EzJ+GTXTTGg==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^20.2.0", + "@commitlint/types": "^20.5.0", "semver": "^7.6.0" }, "engines": { @@ -640,41 +662,46 @@ } }, "node_modules/@commitlint/lint": { - "version": "20.3.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-20.5.0.tgz", + "integrity": "sha512-jiM3hNUdu04jFBf1VgPdjtIPvbuVfDTBAc6L98AWcoLjF5sYqkulBHBzlVWll4rMF1T5zeQFB6r//a+s+BBKlA==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/is-ignored": "^20.2.0", - "@commitlint/parse": "^20.2.0", - "@commitlint/rules": "^20.3.0", - "@commitlint/types": "^20.2.0" + "@commitlint/is-ignored": "^20.5.0", + "@commitlint/parse": "^20.5.0", + "@commitlint/rules": "^20.5.0", + "@commitlint/types": "^20.5.0" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/load": { - 
"version": "20.3.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-20.5.0.tgz", + "integrity": "sha512-sLhhYTL/KxeOTZjjabKDhwidGZan84XKK1+XFkwDYL/4883kIajcz/dZFAhBJmZPtL8+nBx6bnkzA95YxPeDPw==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/config-validator": "^20.2.0", + "@commitlint/config-validator": "^20.5.0", "@commitlint/execute-rule": "^20.0.0", - "@commitlint/resolve-extends": "^20.2.0", - "@commitlint/types": "^20.2.0", - "chalk": "^5.3.0", - "cosmiconfig": "^9.0.0", + "@commitlint/resolve-extends": "^20.5.0", + "@commitlint/types": "^20.5.0", + "cosmiconfig": "^9.0.1", "cosmiconfig-typescript-loader": "^6.1.0", - "lodash.isplainobject": "^4.0.6", - "lodash.merge": "^4.6.2", - "lodash.uniq": "^4.5.0" + "is-plain-obj": "^4.1.0", + "lodash.mergewith": "^4.6.2", + "picocolors": "^1.1.1" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/message": { - "version": "20.0.0", + "version": "20.4.3", + "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-20.4.3.tgz", + "integrity": "sha512-6akwCYrzcrFcTYz9GyUaWlhisY4lmQ3KvrnabmhoeAV8nRH4dXJAh4+EUQ3uArtxxKQkvxJS78hNX2EU3USgxQ==", "dev": true, "license": "MIT", "engines": { @@ -682,26 +709,30 @@ } }, "node_modules/@commitlint/parse": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-20.5.0.tgz", + "integrity": "sha512-SeKWHBMk7YOTnnEWUhx+d1a9vHsjjuo6Uo1xRfPNfeY4bdYFasCH1dDpAv13Lyn+dDPOels+jP6D2GRZqzc5fA==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/types": "^20.2.0", - "conventional-changelog-angular": "^7.0.0", - "conventional-commits-parser": "^5.0.0" + "@commitlint/types": "^20.5.0", + "conventional-changelog-angular": "^8.2.0", + "conventional-commits-parser": "^6.3.0" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/read": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": 
"https://registry.npmjs.org/@commitlint/read/-/read-20.5.0.tgz", + "integrity": "sha512-JDEIJ2+GnWpK8QqwfmW7O42h0aycJEWNqcdkJnyzLD11nf9dW2dWLTVEa8Wtlo4IZFGLPATjR5neA5QlOvIH1w==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/top-level": "^20.0.0", - "@commitlint/types": "^20.2.0", - "git-raw-commits": "^4.0.0", + "@commitlint/top-level": "^20.4.3", + "@commitlint/types": "^20.5.0", + "git-raw-commits": "^5.0.0", "minimist": "^1.2.8", "tinyexec": "^1.0.0" }, @@ -710,7 +741,9 @@ } }, "node_modules/@commitlint/read/node_modules/tinyexec": { - "version": "1.0.2", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.4.tgz", + "integrity": "sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw==", "dev": true, "license": "MIT", "engines": { @@ -718,12 +751,14 @@ } }, "node_modules/@commitlint/resolve-extends": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-20.5.0.tgz", + "integrity": "sha512-3SHPWUW2v0tyspCTcfSsYml0gses92l6TlogwzvM2cbxDgmhSRc+fldDjvGkCXJrjSM87BBaWYTPWwwyASZRrg==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/config-validator": "^20.2.0", - "@commitlint/types": "^20.2.0", + "@commitlint/config-validator": "^20.5.0", + "@commitlint/types": "^20.5.0", "global-directory": "^4.0.1", "import-meta-resolve": "^4.0.0", "lodash.mergewith": "^4.6.2", @@ -734,14 +769,16 @@ } }, "node_modules/@commitlint/rules": { - "version": "20.3.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-20.5.0.tgz", + "integrity": "sha512-5NdQXQEdnDPT5pK8O39ZA7HohzPRHEsDGU23cyVCNPQy4WegAbAwrQk3nIu7p2sl3dutPk8RZd91yKTrMTnRkQ==", "dev": true, "license": "MIT", "dependencies": { - "@commitlint/ensure": "^20.2.0", - "@commitlint/message": "^20.0.0", + "@commitlint/ensure": "^20.5.0", + "@commitlint/message": "^20.4.3", "@commitlint/to-lines": 
"^20.0.0", - "@commitlint/types": "^20.2.0" + "@commitlint/types": "^20.5.0" }, "engines": { "node": ">=v18" @@ -749,6 +786,8 @@ }, "node_modules/@commitlint/to-lines": { "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-20.0.0.tgz", + "integrity": "sha512-2l9gmwiCRqZNWgV+pX1X7z4yP0b3ex/86UmUFgoRt672Ez6cAM2lOQeHFRUTuE6sPpi8XBCGnd8Kh3bMoyHwJw==", "dev": true, "license": "MIT", "engines": { @@ -756,28 +795,59 @@ } }, "node_modules/@commitlint/top-level": { - "version": "20.0.0", + "version": "20.4.3", + "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-20.4.3.tgz", + "integrity": "sha512-qD9xfP6dFg5jQ3NMrOhG0/w5y3bBUsVGyJvXxdWEwBm8hyx4WOk3kKXw28T5czBYvyeCVJgJJ6aoJZUWDpaacQ==", "dev": true, "license": "MIT", "dependencies": { - "find-up": "^7.0.0" + "escalade": "^3.2.0" }, "engines": { "node": ">=v18" } }, "node_modules/@commitlint/types": { - "version": "20.2.0", + "version": "20.5.0", + "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-20.5.0.tgz", + "integrity": "sha512-ZJoS8oSq2CAZEpc/YI9SulLrdiIyXeHb/OGqGrkUP6Q7YV+0ouNAa7GjqRdXeQPncHQIDz/jbCTlHScvYvO/gA==", "dev": true, "license": "MIT", "dependencies": { - "@types/conventional-commits-parser": "^5.0.0", - "chalk": "^5.3.0" + "conventional-commits-parser": "^6.3.0", + "picocolors": "^1.1.1" }, "engines": { "node": ">=v18" } }, + "node_modules/@conventional-changelog/git-client": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@conventional-changelog/git-client/-/git-client-2.6.0.tgz", + "integrity": "sha512-T+uPDciKf0/ioNNDpMGc8FDsehJClZP0yR3Q5MN6wE/Y/1QZ7F+80OgznnTCOlMEG4AV0LvH2UJi3C/nBnaBUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@simple-libs/child-process-utils": "^1.0.0", + "@simple-libs/stream-utils": "^1.2.0", + "semver": "^7.5.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "conventional-commits-filter": "^5.0.0", + "conventional-commits-parser": "^6.3.0" + }, 
+ "peerDependenciesMeta": { + "conventional-commits-filter": { + "optional": true + }, + "conventional-commits-parser": { + "optional": true + } + } + }, "node_modules/@csstools/color-helpers": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-6.0.1.tgz", @@ -1811,6 +1881,72 @@ } } }, + "node_modules/@fastify/otel": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/@fastify/otel/-/otel-0.18.0.tgz", + "integrity": "sha512-3TASCATfw+ctICSb4ymrv7iCm0qJ0N9CarB+CZ7zIJ7KqNbwI5JjyDL1/sxoC0ccTO1Zyd1iQ+oqncPg5FJXaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@opentelemetry/core": "^2.0.0", + "@opentelemetry/instrumentation": "^0.212.0", + "@opentelemetry/semantic-conventions": "^1.28.0", + "minimatch": "^10.2.4" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.9.0" + } + }, + "node_modules/@fastify/otel/node_modules/@opentelemetry/api-logs": { + "version": "0.212.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.212.0.tgz", + "integrity": "sha512-TEEVrLbNROUkYY51sBJGk7lO/OLjuepch8+hmpM6ffMJQ2z/KVCjdHuCFX6fJj8OkJP2zckPjrJzQtXU3IAsFg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.3.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@fastify/otel/node_modules/@opentelemetry/instrumentation": { + "version": "0.212.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.212.0.tgz", + "integrity": "sha512-IyXmpNnifNouMOe0I/gX7ENfv2ZCNdYTF0FpCsoBcpbIHzk81Ww9rQTYTnvghszCg7qGrIhNvWC8dhEifgX9Jg==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api-logs": "0.212.0", + "import-in-the-middle": "^2.0.6", + "require-in-the-middle": "^8.0.0" + }, + "engines": { + "node": "^18.19.0 || >=20.6.0" + 
}, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@fastify/otel/node_modules/import-in-the-middle": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-2.0.6.tgz", + "integrity": "sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw==", + "license": "Apache-2.0", + "dependencies": { + "acorn": "^8.15.0", + "acorn-import-attributes": "^1.9.5", + "cjs-module-lexer": "^2.2.0", + "module-details-from-path": "^1.0.4" + } + }, "node_modules/@google/genai": { "version": "1.44.0", "resolved": "https://registry.npmjs.org/@google/genai/-/genai-1.44.0.tgz", @@ -1961,9 +2097,9 @@ } }, "node_modules/@hono/node-server": { - "version": "1.19.11", - "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.11.tgz", - "integrity": "sha512-dr8/3zEaB+p0D2n/IUrlPF1HZm586qgJNXK1a9fhg/PzdtkK7Ksd5l312tJX2yBuALqDYBlG20QEbayqPyxn+g==", + "version": "1.19.12", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.12.tgz", + "integrity": "sha512-txsUW4SQ1iilgE0l9/e9VQWmELXifEFvmdA1j6WFh/aFPj99hIntrSsq/if0UWyGVkmrRPKA1wCeP+UCr1B9Uw==", "license": "MIT", "engines": { "node": ">=18.14.1" @@ -1986,9 +2122,9 @@ } }, "node_modules/@img/sharp-darwin-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", - "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", "cpu": [ "arm64" ], @@ -2004,13 +2140,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-darwin-arm64": "1.0.4" + "@img/sharp-libvips-darwin-arm64": 
"1.2.4" } }, "node_modules/@img/sharp-darwin-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", - "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", "cpu": [ "x64" ], @@ -2026,13 +2162,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-darwin-x64": "1.0.4" + "@img/sharp-libvips-darwin-x64": "1.2.4" } }, "node_modules/@img/sharp-libvips-darwin-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", - "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", "cpu": [ "arm64" ], @@ -2046,9 +2182,9 @@ } }, "node_modules/@img/sharp-libvips-darwin-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", - "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", "cpu": [ "x64" ], @@ -2062,9 +2198,9 @@ } }, "node_modules/@img/sharp-libvips-linux-arm": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", - "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", "cpu": [ "arm" ], @@ -2078,9 +2214,9 @@ } }, "node_modules/@img/sharp-libvips-linux-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", - "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", "cpu": [ "arm64" ], @@ -2094,9 +2230,9 @@ } }, "node_modules/@img/sharp-libvips-linux-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", - "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", "cpu": [ "x64" ], @@ -2110,9 +2246,9 @@ } }, "node_modules/@img/sharp-libvips-linuxmusl-arm64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", - "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", + "version": 
"1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", "cpu": [ "arm64" ], @@ -2126,9 +2262,9 @@ } }, "node_modules/@img/sharp-libvips-linuxmusl-x64": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", - "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", "cpu": [ "x64" ], @@ -2142,9 +2278,9 @@ } }, "node_modules/@img/sharp-linux-arm": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", - "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", "cpu": [ "arm" ], @@ -2160,13 +2296,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-arm": "1.0.5" + "@img/sharp-libvips-linux-arm": "1.2.4" } }, "node_modules/@img/sharp-linux-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", - "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + 
"integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", "cpu": [ "arm64" ], @@ -2182,13 +2318,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-arm64": "1.0.4" + "@img/sharp-libvips-linux-arm64": "1.2.4" } }, "node_modules/@img/sharp-linux-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", - "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", "cpu": [ "x64" ], @@ -2204,13 +2340,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-x64": "1.0.4" + "@img/sharp-libvips-linux-x64": "1.2.4" } }, "node_modules/@img/sharp-linuxmusl-arm64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", - "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", "cpu": [ "arm64" ], @@ -2226,13 +2362,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-arm64": "1.0.4" + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" } }, "node_modules/@img/sharp-linuxmusl-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", - "integrity": 
"sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", "cpu": [ "x64" ], @@ -2248,13 +2384,32 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-x64": "1.0.4" + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" + } + }, + "node_modules/@img/sharp-win32-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.5.tgz", + "integrity": "sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" } }, "node_modules/@img/sharp-win32-x64": { - "version": "0.33.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", - "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", "cpu": [ "x64" ], @@ -2271,7 +2426,9 @@ } }, "node_modules/@ioredis/commands": { - "version": "1.4.0", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.5.1.tgz", + "integrity": "sha512-JH8ZL/ywcJyR9MmJ5BNqZllXNZQqQbnVZOqpPQqE1vHiFgAw4NHbvE0FOduNU8IX9babitBT46571OnPTT0Zcw==", "license": "MIT" }, "node_modules/@isaacs/cliui": { @@ -2394,9 +2551,9 @@ } }, 
"node_modules/@llmist/cli": { - "version": "16.1.0", - "resolved": "https://registry.npmjs.org/@llmist/cli/-/cli-16.1.0.tgz", - "integrity": "sha512-UO3294pwMeijqyo+3pg8m2dL9XZB4irDmKjHaSkY6Lk9YRH54E4HpqnaS1V23HWaKNQidNi9SAQXMGUqBOuemQ==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/@llmist/cli/-/cli-16.2.0.tgz", + "integrity": "sha512-5OM1rX7TKY2ILfxEUTxqI9eRTftG16PU640CEzOZb7tyl4IWhnDqr9tnSFAlP7Q6FHPTI3zN2xDvwioCG/N9xA==", "hasInstallScript": true, "license": "MIT", "dependencies": { @@ -2409,7 +2566,7 @@ "jiti": "^2.6.1", "js-toml": "^1.0.2", "js-yaml": "^4.1.0", - "llmist": "^16.1.0", + "llmist": "^16.2.0", "marked": "^15.0.12", "marked-terminal": "^7.3.0", "zod": "^4.1.12" @@ -2435,6 +2592,46 @@ "url": "https://github.com/sponsors/colinhacks" } }, + "node_modules/@modelcontextprotocol/sdk": { + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.29.0.tgz", + "integrity": "sha512-zo37mZA9hJWpULgkRpowewez1y6ML5GsXJPY8FI0tBBCd77HEvza4jDqRKOXgHNn867PVGCyTdzqpz0izu5ZjQ==", + "license": "MIT", + "dependencies": { + "@hono/node-server": "^1.19.9", + "ajv": "^8.17.1", + "ajv-formats": "^3.0.1", + "content-type": "^1.0.5", + "cors": "^2.8.5", + "cross-spawn": "^7.0.5", + "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", + "express": "^5.2.1", + "express-rate-limit": "^8.2.1", + "hono": "^4.11.4", + "jose": "^6.1.3", + "json-schema-typed": "^8.0.2", + "pkce-challenge": "^5.0.0", + "raw-body": "^3.0.0", + "zod": "^3.25 || ^4.0", + "zod-to-json-schema": "^3.25.1" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@cfworker/json-schema": "^4.1.1", + "zod": "^3.25 || ^4.0" + }, + "peerDependenciesMeta": { + "@cfworker/json-schema": { + "optional": true + }, + "zod": { + "optional": false + } + } + }, "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { "version": "3.0.3", "cpu": [ @@ -2512,9 +2709,9 @@ ] }, "node_modules/@oclif/core": { - "version": "4.8.0", - 
"resolved": "https://registry.npmjs.org/@oclif/core/-/core-4.8.0.tgz", - "integrity": "sha512-jteNUQKgJHLHFbbz806aGZqf+RJJ7t4gwF4MYa8fCwCxQ8/klJNWc0MvaJiBebk7Mc+J39mdlsB4XraaCKznFw==", + "version": "4.10.3", + "resolved": "https://registry.npmjs.org/@oclif/core/-/core-4.10.3.tgz", + "integrity": "sha512-0mD8vcrrX5uRsxzvI8tbWmSVGngvZA/Qo6O0ZGvLPAWEauSf5GFniwgirhY0SkszuHwu0S1J1ivj/jHmqtIDuA==", "license": "MIT", "dependencies": { "ansi-escapes": "^4.3.2", @@ -2527,7 +2724,7 @@ "indent-string": "^4.0.0", "is-wsl": "^2.2.0", "lilconfig": "^3.1.3", - "minimatch": "^9.0.5", + "minimatch": "^10.2.4", "semver": "^7.7.3", "string-width": "^4.2.3", "supports-color": "^8", @@ -2555,21 +2752,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@oclif/core/node_modules/minimatch": { - "version": "9.0.9", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", - "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.2" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@oclif/core/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -2716,24 +2898,24 @@ } }, "node_modules/@opencode-ai/sdk": { - "version": "1.2.24", - "resolved": "https://registry.npmjs.org/@opencode-ai/sdk/-/sdk-1.2.24.tgz", - "integrity": "sha512-MQamFkRl4B/3d6oIRLNpkYR2fcwet1V/ffKyOKJXWjtP/CT9PDJMtLpu6olVHjXKQi8zMNltwuMhv1QsNtRlZg==", + "version": "1.3.13", + "resolved": "https://registry.npmjs.org/@opencode-ai/sdk/-/sdk-1.3.13.tgz", + "integrity": "sha512-/M6HlNnba+xf1EId6qFb2tG0cvq0db3PCQDug1glrf8wYOU57LYNF8WvHX9zoDKPTMv0F+O4pcP/8J+WvDaxHA==", "license": "MIT" }, "node_modules/@opentelemetry/api": { - "version": "1.9.0", - "resolved": 
"https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", - "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.1.tgz", + "integrity": "sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==", "license": "Apache-2.0", "engines": { "node": ">=8.0.0" } }, "node_modules/@opentelemetry/api-logs": { - "version": "0.211.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.211.0.tgz", - "integrity": "sha512-swFdZq8MCdmdR22jTVGQDhwqDzcI4M10nhjXkLr1EsIzXgZBqm4ZlmmcWsg3TSNf+3mzgOiqveXmBLZuDi2Lgg==", + "version": "0.214.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.214.0.tgz", + "integrity": "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/api": "^1.3.0" @@ -2743,9 +2925,9 @@ } }, "node_modules/@opentelemetry/context-async-hooks": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.5.1.tgz", - "integrity": "sha512-MHbu8XxCHcBn6RwvCt2Vpn1WnLMNECfNKYB14LI5XypcgH4IE0/DiVifVR9tAkwPMyLXN8dOoPJfya3IryLQVw==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.6.1.tgz", + "integrity": "sha512-XHzhwRNkBpeP8Fs/qjGrAf9r9PRv67wkJQ/7ZPaBQQ68DYlTBBx5MF9LvPx7mhuXcDessKK2b+DcxqwpgkcivQ==", "license": "Apache-2.0", "engines": { "node": "^18.19.0 || >=20.6.0" @@ -2755,9 +2937,9 @@ } }, "node_modules/@opentelemetry/core": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.5.1.tgz", - "integrity": "sha512-Dwlc+3HAZqpgTYq0MUyZABjFkcrKTePwuiFVLjahGD8cx3enqihmpAmdgNFO1R4m/sIe5afjJrA25Prqy4NXlA==", + "version": "2.6.1", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/core/-/core-2.6.1.tgz", + "integrity": "sha512-8xHSGWpJP9wBxgBpnqGL0R3PbdWQndL1Qp50qrg71+B28zK5OQmUgcDKLJgzyAAV38t4tOyLMGDD60LneR5W8g==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" @@ -2770,13 +2952,13 @@ } }, "node_modules/@opentelemetry/instrumentation": { - "version": "0.211.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.211.0.tgz", - "integrity": "sha512-h0nrZEC/zvI994nhg7EgQ8URIHt0uDTwN90r3qQUdZORS455bbx+YebnGeEuFghUT0HlJSrLF4iHw67f+odY+Q==", + "version": "0.214.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.214.0.tgz", + "integrity": "sha512-MHqEX5Dk59cqVah5LiARMACku7jXSVk9iVDWOea4x3cr7VfdByeDCURK6o1lntT1JS/Tsovw01UJrBhN3/uC5w==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/api-logs": "0.211.0", - "import-in-the-middle": "^2.0.0", + "@opentelemetry/api-logs": "0.214.0", + "import-in-the-middle": "^3.0.0", "require-in-the-middle": "^8.0.0" }, "engines": { @@ -2787,13 +2969,13 @@ } }, "node_modules/@opentelemetry/instrumentation-amqplib": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.58.0.tgz", - "integrity": "sha512-fjpQtH18J6GxzUZ+cwNhWUpb71u+DzT7rFkg5pLssDGaEber91Y2WNGdpVpwGivfEluMlNMZumzjEqfg8DeKXQ==", + "version": "0.61.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.61.0.tgz", + "integrity": "sha512-mCKoyTGfRNisge4br0NpOFSy2Z1NnEW8hbCJdUDdJFHrPqVzc4IIBPA/vX0U+LUcQqrQvJX+HMIU0dbDRe0i0Q==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.0" }, "engines": { @@ -2804,13 +2986,13 @@ } }, 
"node_modules/@opentelemetry/instrumentation-connect": { - "version": "0.54.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.54.0.tgz", - "integrity": "sha512-43RmbhUhqt3uuPnc16cX6NsxEASEtn8z/cYV8Zpt6EP4p2h9s4FNuJ4Q9BbEQ2C0YlCCB/2crO1ruVz/hWt8fA==", + "version": "0.57.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.57.0.tgz", + "integrity": "sha512-FMEBChnI4FLN5TE9DHwfH7QpNir1JzXno1uz/TAucVdLCyrG0jTrKIcNHt/i30A0M2AunNBCkcd8Ei26dIPKdg==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.27.0", "@types/connect": "3.4.38" }, @@ -2822,12 +3004,12 @@ } }, "node_modules/@opentelemetry/instrumentation-dataloader": { - "version": "0.28.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.28.0.tgz", - "integrity": "sha512-ExXGBp0sUj8yhm6Znhf9jmuOaGDsYfDES3gswZnKr4MCqoBWQdEFn6EoDdt5u+RdbxQER+t43FoUihEfTSqsjA==", + "version": "0.31.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.31.0.tgz", + "integrity": "sha512-f654tZFQXS5YeLDNb9KySrwtg7SnqZN119FauD7acBoTzuLduaiGTNz88ixcVSOOMGZ+EjJu/RFtx5klObC95g==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0" + "@opentelemetry/instrumentation": "^0.214.0" }, "engines": { "node": "^18.19.0 || >=20.6.0" @@ -2837,13 +3019,13 @@ } }, "node_modules/@opentelemetry/instrumentation-express": { - "version": "0.59.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.59.0.tgz", - "integrity": "sha512-pMKV/qnHiW/Q6pmbKkxt0eIhuNEtvJ7sUAyee192HErlr+a1Jx+FZ3WjfmzhQL1geewyGEiPGkmjjAgNY8TgDA==", + "version": "0.62.0", + 
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.62.0.tgz", + "integrity": "sha512-Tvx+vgAZKEQxU3Rx+xWLiR0mLxHwmk69/8ya04+VsV9WYh8w6Lhx5hm5yAMvo1wy0KqWgFKBLwSeo3sHCwdOww==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.27.0" }, "engines": { @@ -2854,13 +3036,13 @@ } }, "node_modules/@opentelemetry/instrumentation-fs": { - "version": "0.30.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.30.0.tgz", - "integrity": "sha512-n3Cf8YhG7reaj5dncGlRIU7iT40bxPOjsBEA5Bc1a1g6e9Qvb+JFJ7SEiMlPbUw4PBmxE3h40ltE8LZ3zVt6OA==", + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.33.0.tgz", + "integrity": "sha512-sCZWXGalQ01wr3tAhSR9ucqFJ0phidpAle6/17HVjD6gN8FLmZMK/8sKxdXYHy3PbnlV1P4zeiSVFNKpbFMNLA==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0" + "@opentelemetry/instrumentation": "^0.214.0" }, "engines": { "node": "^18.19.0 || >=20.6.0" @@ -2870,12 +3052,12 @@ } }, "node_modules/@opentelemetry/instrumentation-generic-pool": { - "version": "0.54.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.54.0.tgz", - "integrity": "sha512-8dXMBzzmEdXfH/wjuRvcJnUFeWzZHUnExkmFJ2uPfa31wmpyBCMxO59yr8f/OXXgSogNgi/uPo9KW9H7LMIZ+g==", + "version": "0.57.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.57.0.tgz", + "integrity": "sha512-orhmlaK+ZIW9hKU+nHTbXrCSXZcH83AescTqmpamHRobRmYSQwRbD0a1odc0yAzuzOtxYiHiXAnpnIpaSSY7Ow==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0" + 
"@opentelemetry/instrumentation": "^0.214.0" }, "engines": { "node": "^18.19.0 || >=20.6.0" @@ -2885,12 +3067,12 @@ } }, "node_modules/@opentelemetry/instrumentation-graphql": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.58.0.tgz", - "integrity": "sha512-+yWVVY7fxOs3j2RixCbvue8vUuJ1inHxN2q1sduqDB0Wnkr4vOzVKRYl/Zy7B31/dcPS72D9lo/kltdOTBM3bQ==", + "version": "0.62.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.62.0.tgz", + "integrity": "sha512-3YNuLVPUxafXkH1jBAbGsKNsP3XVzcFDhCDCE3OqBwCwShlqQbLMRMFh1T/d5jaVZiGVmSsfof+ICKD2iOV8xg==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0" + "@opentelemetry/instrumentation": "^0.214.0" }, "engines": { "node": "^18.19.0 || >=20.6.0" @@ -2900,13 +3082,13 @@ } }, "node_modules/@opentelemetry/instrumentation-hapi": { - "version": "0.57.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.57.0.tgz", - "integrity": "sha512-Os4THbvls8cTQTVA8ApLfZZztuuqGEeqog0XUnyRW7QVF0d/vOVBEcBCk1pazPFmllXGEdNbbat8e2fYIWdFbw==", + "version": "0.60.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.60.0.tgz", + "integrity": "sha512-aNljZKYrEa7obLAxd1bCEDxF7kzCLGXTuTJZ8lMR9rIVEjmuKBXN1gfqpm/OB//Zc2zP4iIve1jBp7sr3mQV6w==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.27.0" }, "engines": { @@ -2917,13 +3099,13 @@ } }, "node_modules/@opentelemetry/instrumentation-http": { - "version": "0.211.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.211.0.tgz", - "integrity": 
"sha512-n0IaQ6oVll9PP84SjbOCwDjaJasWRHi6BLsbMLiT6tNj7QbVOkuA5sk/EfZczwI0j5uTKl1awQPivO/ldVtsqA==", + "version": "0.214.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.214.0.tgz", + "integrity": "sha512-FlkDhZDRjDJDcO2LcSCtjRpkal1NJ8y0fBqBhTvfAR3JSYY2jAIj1kSS5IjmEBt4c3aWv+u/lqLuoCDrrKCSKg==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/core": "2.5.0", - "@opentelemetry/instrumentation": "0.211.0", + "@opentelemetry/core": "2.6.1", + "@opentelemetry/instrumentation": "0.214.0", "@opentelemetry/semantic-conventions": "^1.29.0", "forwarded-parse": "2.1.2" }, @@ -2934,28 +3116,13 @@ "@opentelemetry/api": "^1.3.0" } }, - "node_modules/@opentelemetry/instrumentation-http/node_modules/@opentelemetry/core": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.5.0.tgz", - "integrity": "sha512-ka4H8OM6+DlUhSAZpONu0cPBtPPTQKxbxVzC4CzVx5+K4JnroJVBtDzLAMx4/3CDTJXRvVFhpFjtl4SaiTNoyQ==", - "license": "Apache-2.0", - "dependencies": { - "@opentelemetry/semantic-conventions": "^1.29.0" - }, - "engines": { - "node": "^18.19.0 || >=20.6.0" - }, - "peerDependencies": { - "@opentelemetry/api": ">=1.0.0 <1.10.0" - } - }, "node_modules/@opentelemetry/instrumentation-ioredis": { - "version": "0.59.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.59.0.tgz", - "integrity": "sha512-875UxzBHWkW+P4Y45SoFM2AR8f8TzBMD8eO7QXGCyFSCUMP5s9vtt/BS8b/r2kqLyaRPK6mLbdnZznK3XzQWvw==", + "version": "0.62.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.62.0.tgz", + "integrity": "sha512-ZYt//zcPve8qklaZX+5Z4MkU7UpEkFRrxsf2cnaKYBitqDnsCN69CPAuuMOX6NYdW2rG9sFy7V/QWtBlP5XiNQ==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/redis-common": "^0.38.2", 
"@opentelemetry/semantic-conventions": "^1.33.0" }, @@ -2967,12 +3134,12 @@ } }, "node_modules/@opentelemetry/instrumentation-kafkajs": { - "version": "0.20.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-kafkajs/-/instrumentation-kafkajs-0.20.0.tgz", - "integrity": "sha512-yJXOuWZROzj7WmYCUiyT27tIfqBrVtl1/TwVbQyWPz7rL0r1Lu7kWjD0PiVeTCIL6CrIZ7M2s8eBxsTAOxbNvw==", + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-kafkajs/-/instrumentation-kafkajs-0.23.0.tgz", + "integrity": "sha512-4K+nVo+zI+aDz0Z85SObwbdixIbzS9moIuKJaYsdlzcHYnKOPtB7ya8r8Ezivy/GVIBHiKJVq4tv+BEkgOMLaQ==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.30.0" }, "engines": { @@ -2983,12 +3150,12 @@ } }, "node_modules/@opentelemetry/instrumentation-knex": { - "version": "0.55.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.55.0.tgz", - "integrity": "sha512-FtTL5DUx5Ka/8VK6P1VwnlUXPa3nrb7REvm5ddLUIeXXq4tb9pKd+/ThB1xM/IjefkRSN3z8a5t7epYw1JLBJQ==", + "version": "0.58.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.58.0.tgz", + "integrity": "sha512-Hc/o8fSsaWxZ8r1Yw4rNDLwTpUopTf4X32y4W6UhlHmW8Wizz8wfhgOKIelSeqFVTKBBPIDUOsQWuIMxBmu8Bw==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.1" }, "engines": { @@ -2999,13 +3166,13 @@ } }, "node_modules/@opentelemetry/instrumentation-koa": { - "version": "0.59.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.59.0.tgz", - "integrity": "sha512-K9o2skADV20Skdu5tG2bogPKiSpXh4KxfLjz6FuqIVvDJNibwSdu5UvyyBzRVp1rQMV6UmoIk6d3PyPtJbaGSg==", + "version": "0.62.0", 
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.62.0.tgz", + "integrity": "sha512-uVip0VuGUQXZ+vFxkKxAUNq8qNl+VFlyHDh/U6IQ8COOEDfbEchdaHnpFrMYF3psZRUuoSIgb7xOeXj00RdwDA==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.36.0" }, "engines": { @@ -3016,12 +3183,12 @@ } }, "node_modules/@opentelemetry/instrumentation-lru-memoizer": { - "version": "0.55.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.55.0.tgz", - "integrity": "sha512-FDBfT7yDGcspN0Cxbu/k8A0Pp1Jhv/m7BMTzXGpcb8ENl3tDj/51U65R5lWzUH15GaZA15HQ5A5wtafklxYj7g==", + "version": "0.58.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.58.0.tgz", + "integrity": "sha512-6grM3TdMyHzlGY1cUA+mwoPueB1F3dYKgKtZIH6jOFXqfHAByyLTc+6PFjGM9tKh52CFBJaDwodNlL/Td39z7Q==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0" + "@opentelemetry/instrumentation": "^0.214.0" }, "engines": { "node": "^18.19.0 || >=20.6.0" @@ -3031,12 +3198,12 @@ } }, "node_modules/@opentelemetry/instrumentation-mongodb": { - "version": "0.64.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.64.0.tgz", - "integrity": "sha512-pFlCJjweTqVp7B220mCvCld1c1eYKZfQt1p3bxSbcReypKLJTwat+wbL2YZoX9jPi5X2O8tTKFEOahO5ehQGsA==", + "version": "0.67.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.67.0.tgz", + "integrity": "sha512-1WJp5N1lYfHq2IhECOTewFs5Tf2NfUOwQRqs/rZdXKTezArMlucxgzAaqcgp3A3YREXopXTpXHsxZTGHjNhMdQ==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": 
"^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.0" }, "engines": { @@ -3047,13 +3214,13 @@ } }, "node_modules/@opentelemetry/instrumentation-mongoose": { - "version": "0.57.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.57.0.tgz", - "integrity": "sha512-MthiekrU/BAJc5JZoZeJmo0OTX6ycJMiP6sMOSRTkvz5BrPMYDqaJos0OgsLPL/HpcgHP7eo5pduETuLguOqcg==", + "version": "0.60.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.60.0.tgz", + "integrity": "sha512-8BahAZpKsOoc+lrZGb7Ofn4g3z8qtp5IxDfvAVpKXsEheQN7ONMH5djT5ihy6yf8yyeQJGS0gXFfpEAEeEHqQg==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.0" }, "engines": { @@ -3064,12 +3231,12 @@ } }, "node_modules/@opentelemetry/instrumentation-mysql": { - "version": "0.57.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.57.0.tgz", - "integrity": "sha512-HFS/+FcZ6Q7piM7Il7CzQ4VHhJvGMJWjx7EgCkP5AnTntSN5rb5Xi3TkYJHBKeR27A0QqPlGaCITi93fUDs++Q==", + "version": "0.60.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.60.0.tgz", + "integrity": "sha512-08pO8GFPEIz2zquKDGteBZDNmwketdgH8hTe9rVYgW9kCJXq1Psj3wPQGx+VaX4ZJKCfPeoLMYup9+cxHvZyVQ==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.0", "@types/mysql": "2.15.27" }, @@ -3081,12 +3248,12 @@ } }, "node_modules/@opentelemetry/instrumentation-mysql2": { - "version": "0.57.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.57.0.tgz", - "integrity": 
"sha512-nHSrYAwF7+aV1E1V9yOOP9TchOodb6fjn4gFvdrdQXiRE7cMuffyLLbCZlZd4wsspBzVwOXX8mpURdRserAhNA==", + "version": "0.60.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.60.0.tgz", + "integrity": "sha512-m/5d3bxQALllCzezYDk/6vajh0tj5OijMMvOZGr+qN1NMXm1dzMNwyJ0gNZW7Fo3YFRyj/jJMxIw+W7d525dlw==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.0", "@opentelemetry/sql-common": "^0.41.2" }, @@ -3098,13 +3265,13 @@ } }, "node_modules/@opentelemetry/instrumentation-pg": { - "version": "0.63.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.63.0.tgz", - "integrity": "sha512-dKm/ODNN3GgIQVlbD6ZPxwRc3kleLf95hrRWXM+l8wYo+vSeXtEpQPT53afEf6VFWDVzJK55VGn8KMLtSve/cg==", + "version": "0.66.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.66.0.tgz", + "integrity": "sha512-KxfLGXBb7k2ueaPJfq2GXBDXBly8P+SpR/4Mj410hhNgmQF3sCqwXvUBQxZQkDAmsdBAoenM+yV1LhtsMRamcA==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.34.0", "@opentelemetry/sql-common": "^0.41.2", "@types/pg": "8.15.6", @@ -3129,12 +3296,12 @@ } }, "node_modules/@opentelemetry/instrumentation-redis": { - "version": "0.59.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.59.0.tgz", - "integrity": "sha512-JKv1KDDYA2chJ1PC3pLP+Q9ISMQk6h5ey+99mB57/ARk0vQPGZTTEb4h4/JlcEpy7AYT8HIGv7X6l+br03Neeg==", + "version": "0.62.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.62.0.tgz", + "integrity": 
"sha512-y3pPpot7WzR/8JtHcYlTYsyY8g+pbFhAqbwAuG5bLPnR6v6pt1rQc0DpH0OlGP/9CZbWBP+Zhwp9yFoygf/ZXQ==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/redis-common": "^0.38.2", "@opentelemetry/semantic-conventions": "^1.27.0" }, @@ -3146,12 +3313,12 @@ } }, "node_modules/@opentelemetry/instrumentation-tedious": { - "version": "0.30.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.30.0.tgz", - "integrity": "sha512-bZy9Q8jFdycKQ2pAsyuHYUHNmCxCOGdG6eg1Mn75RvQDccq832sU5OWOBnc12EFUELI6icJkhR7+EQKMBam2GA==", + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.33.0.tgz", + "integrity": "sha512-Q6WQwAD01MMTub31GlejoiFACYNw26J426wyjvU7by7fDIr2nZXNW4vhTGs7i7F0TnXBO3xN688g1tdUgYwJ5w==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.33.0", "@types/tedious": "^4.0.14" }, @@ -3163,13 +3330,13 @@ } }, "node_modules/@opentelemetry/instrumentation-undici": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-undici/-/instrumentation-undici-0.21.0.tgz", - "integrity": "sha512-gok0LPUOTz2FQ1YJMZzaHcOzDFyT64XJ8M9rNkugk923/p6lDGms/cRW1cqgqp6N6qcd6K6YdVHwPEhnx9BWbw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-undici/-/instrumentation-undici-0.24.0.tgz", + "integrity": "sha512-oKzZ3uvqP17sV0EsoQcJgjEfIp0kiZRbYu/eD8p13Cbahumf8lb/xpYeNr/hfAJ4owzEtIDcGIjprfLcYbIKBQ==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/core": "^2.0.0", - "@opentelemetry/instrumentation": "^0.211.0", + "@opentelemetry/instrumentation": "^0.214.0", "@opentelemetry/semantic-conventions": "^1.24.0" }, "engines": { @@ -3189,12 
+3356,12 @@ } }, "node_modules/@opentelemetry/resources": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.5.1.tgz", - "integrity": "sha512-BViBCdE/GuXRlp9k7nS1w6wJvY5fnFX5XvuEtWsTAOQFIO89Eru7lGW3WbfbxtCuZ/GbrJfAziXG0w0dpxL7eQ==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.6.1.tgz", + "integrity": "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/core": "2.5.1", + "@opentelemetry/core": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "engines": { @@ -3205,13 +3372,13 @@ } }, "node_modules/@opentelemetry/sdk-trace-base": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.5.1.tgz", - "integrity": "sha512-iZH3Gw8cxQn0gjpOjJMmKLd9GIaNh/E3v3ST67vyzLSxHBs14HsG4dy7jMYyC5WXGdBVEcM7U/XTF5hCQxjDMw==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.6.1.tgz", + "integrity": "sha512-r86ut4T1e8vNwB35CqCcKd45yzqH6/6Wzvpk2/cZB8PsPLlZFTvrh8yfOS3CYZYcUmAx4hHTZJ8AO8Dj8nrdhw==", "license": "Apache-2.0", "dependencies": { - "@opentelemetry/core": "2.5.1", - "@opentelemetry/resources": "2.5.1", + "@opentelemetry/core": "2.6.1", + "@opentelemetry/resources": "2.6.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "engines": { @@ -3222,9 +3389,9 @@ } }, "node_modules/@opentelemetry/semantic-conventions": { - "version": "1.39.0", - "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.39.0.tgz", - "integrity": "sha512-R5R9tb2AXs2IRLNKLBJDynhkfmx7mX0vi8NkhZb3gUkPWHn6HXk5J8iQ/dql0U3ApfWym4kXXmBDRGO+oeOfjg==", + "version": "1.40.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.40.0.tgz", + "integrity": 
"sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw==", "license": "Apache-2.0", "engines": { "node": ">=14" @@ -3254,9 +3421,9 @@ } }, "node_modules/@prisma/instrumentation": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@prisma/instrumentation/-/instrumentation-7.2.0.tgz", - "integrity": "sha512-Rh9Z4x5kEj1OdARd7U18AtVrnL6rmLSI0qYShaB4W7Wx5BKbgzndWF+QnuzMb7GLfVdlT5aYCXoPQVYuYtVu0g==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/@prisma/instrumentation/-/instrumentation-7.6.0.tgz", + "integrity": "sha512-ZPW2gRiwpPzEfgeZgaekhqXrbW+Y2RJKHVqUmlhZhKzRNCcvR6DykzylDrynpArKKRQtLxoZy36fK7U0p3pdgQ==", "license": "Apache-2.0", "dependencies": { "@opentelemetry/instrumentation": "^0.207.0" @@ -3294,6 +3461,18 @@ "@opentelemetry/api": "^1.3.0" } }, + "node_modules/@prisma/instrumentation/node_modules/import-in-the-middle": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-2.0.6.tgz", + "integrity": "sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw==", + "license": "Apache-2.0", + "dependencies": { + "acorn": "^8.15.0", + "acorn-import-attributes": "^1.9.5", + "cjs-module-lexer": "^2.2.0", + "module-details-from-path": "^1.0.4" + } + }, "node_modules/@protobufjs/aspromise": { "version": "1.1.2", "license": "BSD-3-Clause" @@ -3689,70 +3868,69 @@ ] }, "node_modules/@sentry/core": { - "version": "10.39.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-10.39.0.tgz", - "integrity": "sha512-xCLip2mBwCdRrvXHtVEULX0NffUTYZZBhEUGht0WFL+GNdNQ7gmBOGOczhZlrf2hgFFtDO0fs1xiP9bqq5orEQ==", + "version": "10.47.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-10.47.0.tgz", + "integrity": "sha512-nsYRAx3EWezDut+Zl+UwwP07thh9uY7CfSAi2whTdcJl5hu1nSp2z8bba7Vq/MGbNLnazkd3A+GITBEML924JA==", "license": "MIT", "engines": { "node": ">=18" } }, "node_modules/@sentry/node": { - 
"version": "10.39.0", - "resolved": "https://registry.npmjs.org/@sentry/node/-/node-10.39.0.tgz", - "integrity": "sha512-dx66DtU/xkCTPEDsjU+mYSIEbzu06pzKNQcDA2wvx7wvwsUciZ5yA32Ce/o6p2uHHgy0/joJX9rP5J/BIijaOA==", - "license": "MIT", - "dependencies": { - "@opentelemetry/api": "^1.9.0", - "@opentelemetry/context-async-hooks": "^2.5.0", - "@opentelemetry/core": "^2.5.0", - "@opentelemetry/instrumentation": "^0.211.0", - "@opentelemetry/instrumentation-amqplib": "0.58.0", - "@opentelemetry/instrumentation-connect": "0.54.0", - "@opentelemetry/instrumentation-dataloader": "0.28.0", - "@opentelemetry/instrumentation-express": "0.59.0", - "@opentelemetry/instrumentation-fs": "0.30.0", - "@opentelemetry/instrumentation-generic-pool": "0.54.0", - "@opentelemetry/instrumentation-graphql": "0.58.0", - "@opentelemetry/instrumentation-hapi": "0.57.0", - "@opentelemetry/instrumentation-http": "0.211.0", - "@opentelemetry/instrumentation-ioredis": "0.59.0", - "@opentelemetry/instrumentation-kafkajs": "0.20.0", - "@opentelemetry/instrumentation-knex": "0.55.0", - "@opentelemetry/instrumentation-koa": "0.59.0", - "@opentelemetry/instrumentation-lru-memoizer": "0.55.0", - "@opentelemetry/instrumentation-mongodb": "0.64.0", - "@opentelemetry/instrumentation-mongoose": "0.57.0", - "@opentelemetry/instrumentation-mysql": "0.57.0", - "@opentelemetry/instrumentation-mysql2": "0.57.0", - "@opentelemetry/instrumentation-pg": "0.63.0", - "@opentelemetry/instrumentation-redis": "0.59.0", - "@opentelemetry/instrumentation-tedious": "0.30.0", - "@opentelemetry/instrumentation-undici": "0.21.0", - "@opentelemetry/resources": "^2.5.0", - "@opentelemetry/sdk-trace-base": "^2.5.0", - "@opentelemetry/semantic-conventions": "^1.39.0", - "@prisma/instrumentation": "7.2.0", - "@sentry/core": "10.39.0", - "@sentry/node-core": "10.39.0", - "@sentry/opentelemetry": "10.39.0", - "import-in-the-middle": "^2.0.6", - "minimatch": "^9.0.0" + "version": "10.47.0", + "resolved": 
"https://registry.npmjs.org/@sentry/node/-/node-10.47.0.tgz", + "integrity": "sha512-R+btqPepv88o635G6HtVewLjqCLUedBg5HBs7Nq1qbbKvyti01uArUF2f+3DsLenk5B9LUNiRlE+frZA44Ahmw==", + "license": "MIT", + "dependencies": { + "@fastify/otel": "0.18.0", + "@opentelemetry/api": "^1.9.1", + "@opentelemetry/context-async-hooks": "^2.6.1", + "@opentelemetry/core": "^2.6.1", + "@opentelemetry/instrumentation": "^0.214.0", + "@opentelemetry/instrumentation-amqplib": "0.61.0", + "@opentelemetry/instrumentation-connect": "0.57.0", + "@opentelemetry/instrumentation-dataloader": "0.31.0", + "@opentelemetry/instrumentation-express": "0.62.0", + "@opentelemetry/instrumentation-fs": "0.33.0", + "@opentelemetry/instrumentation-generic-pool": "0.57.0", + "@opentelemetry/instrumentation-graphql": "0.62.0", + "@opentelemetry/instrumentation-hapi": "0.60.0", + "@opentelemetry/instrumentation-http": "0.214.0", + "@opentelemetry/instrumentation-ioredis": "0.62.0", + "@opentelemetry/instrumentation-kafkajs": "0.23.0", + "@opentelemetry/instrumentation-knex": "0.58.0", + "@opentelemetry/instrumentation-koa": "0.62.0", + "@opentelemetry/instrumentation-lru-memoizer": "0.58.0", + "@opentelemetry/instrumentation-mongodb": "0.67.0", + "@opentelemetry/instrumentation-mongoose": "0.60.0", + "@opentelemetry/instrumentation-mysql": "0.60.0", + "@opentelemetry/instrumentation-mysql2": "0.60.0", + "@opentelemetry/instrumentation-pg": "0.66.0", + "@opentelemetry/instrumentation-redis": "0.62.0", + "@opentelemetry/instrumentation-tedious": "0.33.0", + "@opentelemetry/instrumentation-undici": "0.24.0", + "@opentelemetry/resources": "^2.6.1", + "@opentelemetry/sdk-trace-base": "^2.6.1", + "@opentelemetry/semantic-conventions": "^1.40.0", + "@prisma/instrumentation": "7.6.0", + "@sentry/core": "10.47.0", + "@sentry/node-core": "10.47.0", + "@sentry/opentelemetry": "10.47.0", + "import-in-the-middle": "^3.0.0" }, "engines": { "node": ">=18" } }, "node_modules/@sentry/node-core": { - "version": "10.39.0", - 
"resolved": "https://registry.npmjs.org/@sentry/node-core/-/node-core-10.39.0.tgz", - "integrity": "sha512-xdeBG00TmtAcGvXnZNbqOCvnZ5kY3s5aT/L8wUQ0w0TT2KmrC9XL/7UHUfJ45TLbjl10kZOtaMQXgUjpwSJW+g==", + "version": "10.47.0", + "resolved": "https://registry.npmjs.org/@sentry/node-core/-/node-core-10.47.0.tgz", + "integrity": "sha512-qv6LsqHbkQmd0aQEUox/svRSz26J+l4gGjFOUNEay2armZu9XLD+Ct89jpFgZD5oIPNAj2jraodTRqydXiwS5w==", "license": "MIT", "dependencies": { - "@apm-js-collab/tracing-hooks": "^0.3.1", - "@sentry/core": "10.39.0", - "@sentry/opentelemetry": "10.39.0", - "import-in-the-middle": "^2.0.6" + "@sentry/core": "10.47.0", + "@sentry/opentelemetry": "10.47.0", + "import-in-the-middle": "^3.0.0" }, "engines": { "node": ">=18" @@ -3761,6 +3939,7 @@ "@opentelemetry/api": "^1.9.0", "@opentelemetry/context-async-hooks": "^1.30.1 || ^2.1.0", "@opentelemetry/core": "^1.30.1 || ^2.1.0", + "@opentelemetry/exporter-trace-otlp-http": ">=0.57.0 <1", "@opentelemetry/instrumentation": ">=0.57.1 <1", "@opentelemetry/resources": "^1.30.1 || ^2.1.0", "@opentelemetry/sdk-trace-base": "^1.30.1 || ^2.1.0", @@ -3776,6 +3955,9 @@ "@opentelemetry/core": { "optional": true }, + "@opentelemetry/exporter-trace-otlp-http": { + "optional": true + }, "@opentelemetry/instrumentation": { "optional": true }, @@ -3790,28 +3972,13 @@ } } }, - "node_modules/@sentry/node/node_modules/minimatch": { - "version": "9.0.9", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", - "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.2" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@sentry/opentelemetry": { - "version": "10.39.0", - "resolved": "https://registry.npmjs.org/@sentry/opentelemetry/-/opentelemetry-10.39.0.tgz", - "integrity": 
"sha512-eU8t/pyxjy7xYt6PNCVxT+8SJw5E3pnupdcUNN4ClqG4O5lX4QCDLtId48ki7i30VqrLtR7vmCHMSvqXXdvXPA==", + "version": "10.47.0", + "resolved": "https://registry.npmjs.org/@sentry/opentelemetry/-/opentelemetry-10.47.0.tgz", + "integrity": "sha512-f6Hw2lrpCjlOksiosP0Z2jK/+l+21SIdoNglVeG/sttMyx8C8ywONKh0Ha50sFsvB1VaB8n94RKzzf3hkh9V3g==", "license": "MIT", "dependencies": { - "@sentry/core": "10.39.0" + "@sentry/core": "10.47.0" }, "engines": { "node": ">=18" @@ -3824,6 +3991,35 @@ "@opentelemetry/semantic-conventions": "^1.39.0" } }, + "node_modules/@simple-libs/child-process-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@simple-libs/child-process-utils/-/child-process-utils-1.0.2.tgz", + "integrity": "sha512-/4R8QKnd/8agJynkNdJmNw2MBxuFTRcNFnE5Sg/G+jkSsV8/UBgULMzhizWWW42p8L5H7flImV2ATi79Ove2Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@simple-libs/stream-utils": "^1.2.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://ko-fi.com/dangreen" + } + }, + "node_modules/@simple-libs/stream-utils": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@simple-libs/stream-utils/-/stream-utils-1.2.0.tgz", + "integrity": "sha512-KxXvfapcixpz6rVEB6HPjOUZT22yN6v0vI0urQSk1L8MlEWPDFCZkhw2xmkyoTGYeFw7tWTZd7e3lVzRZRN/EA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://ko-fi.com/dangreen" + } + }, "node_modules/@sindresorhus/is": { "version": "4.6.0", "license": "MIT", @@ -3883,32 +4079,40 @@ } }, "node_modules/@trpc/client": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/@trpc/client/-/client-11.10.0.tgz", - "integrity": "sha512-h0s2AwDtuhS8INRb4hlo4z3RKCkarWqlOy+3ffJgrlDxzzW6aLUN+9nDrcN4huPje1Em15tbCOqhIc6oaKYTRw==", + "version": "11.16.0", + "resolved": "https://registry.npmjs.org/@trpc/client/-/client-11.16.0.tgz", + "integrity": 
"sha512-TxIzm7OoK3baKZ0XCbuMUbI3GhgjcbKHIc4nWVKaRpCRnbSh0T31BT6fTPYwtnA/Nur8pBCGqC2B4J5hEPiPFQ==", "funding": [ "https://trpc.io/sponsor" ], "license": "MIT", + "bin": { + "intent": "bin/intent.js" + }, "peerDependencies": { - "@trpc/server": "11.10.0", + "@trpc/server": "11.16.0", "typescript": ">=5.7.2" } }, "node_modules/@trpc/server": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/@trpc/server/-/server-11.10.0.tgz", - "integrity": "sha512-zZjTrR6He61e5TiT7e/bQqab/jRcXBZM8Fg78Yoo8uh5pz60dzzbYuONNUCOkafv5ppXVMms4NHYfNZgzw50vg==", + "version": "11.16.0", + "resolved": "https://registry.npmjs.org/@trpc/server/-/server-11.16.0.tgz", + "integrity": "sha512-XgGuUMddrUTd04+za/WE5GFuZ1/YU9XQG0t3VL5WOIu2JspkOlq6k4RYEiqS6HSJt+S0RXaPdIoE2anIP/BBRQ==", "funding": [ "https://trpc.io/sponsor" ], "license": "MIT", + "bin": { + "intent": "bin/intent.js" + }, "peerDependencies": { "typescript": ">=5.7.2" } }, "node_modules/@types/adm-zip": { - "version": "0.5.7", + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.8.tgz", + "integrity": "sha512-RVVH7QvZYbN+ihqZ4kX/dMiowf6o+Jk1fNwiSdx0NahBJLU787zkULhGhJM8mf/obmLGmgdMM0bXsQTmyfbR7Q==", "dev": true, "license": "MIT", "dependencies": { @@ -3959,14 +4163,6 @@ "@types/node": "*" } }, - "node_modules/@types/conventional-commits-parser": { - "version": "5.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/deep-eql": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", @@ -4024,16 +4220,18 @@ } }, "node_modules/@types/node": { - "version": "22.19.3", + "version": "22.19.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.15.tgz", + "integrity": "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==", "license": "MIT", "dependencies": { "undici-types": "~6.21.0" } }, "node_modules/@types/pg": { - "version": 
"8.16.0", - "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.16.0.tgz", - "integrity": "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==", + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-bEPFOaMAHTEP1EzpvHTbmwR8UsFyHSKsRisLIHVMXnpNefSbGA1bD6CVy+qKjGSqmZqNqBDV2azOBo8TgkcVow==", "license": "MIT", "dependencies": { "@types/node": "*", @@ -4293,10 +4491,48 @@ "node": ">=6.5" } }, - "node_modules/acorn": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", - "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/accepts/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/accepts/node_modules/mime-types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/acorn": { + "version": "8.16.0", + "resolved": 
"https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "license": "MIT", "bin": { "acorn": "bin/acorn" @@ -4325,7 +4561,6 @@ "version": "8.18.0", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", - "dev": true, "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", @@ -4338,6 +4573,23 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, "node_modules/ansi-escapes": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.3.0.tgz", @@ -4571,11 +4823,15 @@ }, "node_modules/array-ify": { "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", + "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==", "dev": true, "license": "MIT" }, "node_modules/asn1": { "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", "license": "MIT", "dependencies": { "safer-buffer": "~2.1.0" @@ -4691,6 +4947,8 @@ }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": 
"sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "license": "BSD-3-Clause", "dependencies": { "tweetnacl": "^0.14.3" @@ -4726,6 +4984,30 @@ "readable-stream": "^3.4.0" } }, + "node_modules/body-parser": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz", + "integrity": "sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.3", + "http-errors": "^2.0.0", + "iconv-lite": "^0.7.0", + "on-finished": "^2.4.1", + "qs": "^6.14.1", + "raw-body": "^3.0.1", + "type-is": "^2.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/brace-expansion": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", @@ -4777,20 +5059,24 @@ }, "node_modules/buildcheck": { "version": "0.0.7", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.7.tgz", + "integrity": "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==", "optional": true, "engines": { "node": ">=10.0.0" } }, "node_modules/bullmq": { - "version": "5.66.4", + "version": "5.72.0", + "resolved": "https://registry.npmjs.org/bullmq/-/bullmq-5.72.0.tgz", + "integrity": "sha512-1Wmfym7bC8BFxDjKcF4iZNZmqXYo0rgPFlxfi8ET3AaP/vOY/MY33iWsWqAKwe8v/QO/8osipjwTAcFB7egINA==", "license": "MIT", "dependencies": { "cron-parser": "4.9.0", - "ioredis": "5.8.2", + "ioredis": "5.10.1", "msgpackr": "1.11.5", "node-abort-controller": "3.1.1", - "semver": "7.7.3", + "semver": "7.7.4", "tslib": "2.8.1", "uuid": "11.1.0" } @@ -4810,6 +5096,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/cac": { "version": "6.7.14", "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", @@ -4831,8 +5126,26 @@ "node": ">= 0.4" } }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/callsites": { "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "dev": true, "license": "MIT", "engines": { @@ -5016,6 +5329,8 @@ }, "node_modules/cluster-key-slot": { "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", "license": "Apache-2.0", "engines": { "node": ">=0.10.0" @@ -5046,7 +5361,9 @@ } }, "node_modules/commander": { - "version": "14.0.2", + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.3.tgz", + "integrity": "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==", "dev": true, "license": "MIT", "engines": { @@ -5055,6 +5372,8 @@ }, "node_modules/compare-func": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz", + "integrity": 
"sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==", "dev": true, "license": "MIT", "dependencies": { @@ -5062,17 +5381,6 @@ "dot-prop": "^5.1.0" } }, - "node_modules/compare-func/node_modules/dot-prop": { - "version": "5.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "is-obj": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/compress-commons": { "version": "6.0.2", "license": "MIT", @@ -5238,43 +5546,87 @@ "node": ">=12" } }, + "node_modules/content-disposition": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/conventional-changelog-angular": { - "version": "7.0.0", + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-8.3.1.tgz", + "integrity": "sha512-6gfI3otXK5Ph5DfCOI1dblr+kN3FAm5a97hYoQkqNZxOaYa5WKfXH+AnpsmS+iUH2mgVC2Cg2Qw9m5OKcmNrIg==", "dev": true, "license": "ISC", "dependencies": { "compare-func": "^2.0.0" }, "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/conventional-changelog-conventionalcommits": { - "version": "7.0.2", + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-9.3.1.tgz", + "integrity": 
"sha512-dTYtpIacRpcZgrvBYvBfArMmK2xvIpv2TaxM0/ZI5CBtNUzvF2x0t15HsbRABWprS6UPmvj+PzHVjSx4qAVKyw==", "dev": true, "license": "ISC", "dependencies": { "compare-func": "^2.0.0" }, "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/conventional-commits-parser": { - "version": "5.0.0", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-6.4.0.tgz", + "integrity": "sha512-tvRg7FIBNlyPzjdG8wWRlPHQJJHI7DylhtRGeU9Lq+JuoPh5BKpPRX83ZdLrvXuOSu5Eo/e7SzOQhU4Hd2Miuw==", "dev": true, "license": "MIT", "dependencies": { - "is-text-path": "^2.0.0", - "JSONStream": "^1.3.5", - "meow": "^12.0.1", - "split2": "^4.0.0" + "@simple-libs/stream-utils": "^1.2.0", + "meow": "^13.0.0" }, "bin": { - "conventional-commits-parser": "cli.mjs" + "conventional-commits-parser": "dist/cli/index.js" }, "engines": { - "node": ">=16" + "node": ">=18" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" } }, "node_modules/core-js-pure": { @@ -5292,8 +5644,27 @@ "version": "1.0.3", "license": "MIT" }, + "node_modules/cors": { + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.6.tgz", + "integrity": "sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==", + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/cosmiconfig": { - "version": "9.0.0", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.1.tgz", + "integrity": "sha512-hr4ihw+DBqcvrsEDioRO31Z17x71pUYoNe/4h6Z0wB72p7MU7/9gH8Q3s12NFhHPfYBBOV3qyfUxmr/Yn3shnQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5319,6 +5690,8 @@ }, "node_modules/cosmiconfig-typescript-loader": { "version": "6.2.0", + "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-6.2.0.tgz", + "integrity": "sha512-GEN39v7TgdxgIoNcdkRE3uiAzQt3UXLyHbRHD6YoL048XAeOomyxaP+Hh/+2C6C2wYjxJ2onhJcsQp+L4YEkVQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5335,6 +5708,8 @@ }, "node_modules/cpu-features": { "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", "hasInstallScript": true, "optional": true, "dependencies": { @@ -5461,17 +5836,6 @@ "dev": true, "license": "MIT" }, - "node_modules/dargs": { - "version": "8.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/data-uri-to-buffer": { "version": "4.0.1", "license": "MIT", @@ -5574,11 +5938,22 @@ }, "node_modules/denque": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", + "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==", "license": "Apache-2.0", "engines": { "node": ">=0.10" } }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + 
"engines": { + "node": ">= 0.8" + } + }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -5613,7 +5988,9 @@ "license": "Apache-2.0" }, "node_modules/docker-modem": { - "version": "5.0.6", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.7.tgz", + "integrity": "sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==", "license": "Apache-2.0", "dependencies": { "debug": "^4.1.1", @@ -5626,13 +6003,15 @@ } }, "node_modules/dockerode": { - "version": "4.0.9", + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.10.tgz", + "integrity": "sha512-8L/P9JynLBiG7/coiA4FlQXegHltRqS0a+KqI44P1zgQh8QLHTg7FKOwhkBgSJwZTeHsq30WRoVFLuwkfK0YFg==", "license": "Apache-2.0", "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", - "docker-modem": "^5.0.6", + "docker-modem": "^5.0.7", "protobufjs": "^7.3.2", "tar-fs": "^2.1.4", "uuid": "^10.0.0" @@ -5659,17 +6038,30 @@ "dev": true, "license": "MIT" }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/drizzle-kit": { - "version": "0.31.9", - "resolved": "https://registry.npmjs.org/drizzle-kit/-/drizzle-kit-0.31.9.tgz", - "integrity": "sha512-GViD3IgsXn7trFyBUUHyTFBpH/FsHTxYJ66qdbVggxef4UBPHRYxQaRzYLTuekYnk9i5FIEL9pbBIwMqX/Uwrg==", + "version": "0.31.10", + "resolved": "https://registry.npmjs.org/drizzle-kit/-/drizzle-kit-0.31.10.tgz", + "integrity": "sha512-7OZcmQUrdGI+DUNNsKBn1aW8qSoKuTH7d0mYgSP8bAzdFzKoovxEFnoGQp2dVs82EOJeYycqRtciopszwUf8bw==", "dev": true, "license": 
"MIT", "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", - "esbuild-register": "^3.5.0" + "tsx": "^4.21.0" }, "bin": { "drizzle-kit": "bin.cjs" @@ -6160,9 +6552,9 @@ } }, "node_modules/drizzle-orm": { - "version": "0.45.1", - "resolved": "https://registry.npmjs.org/drizzle-orm/-/drizzle-orm-0.45.1.tgz", - "integrity": "sha512-Te0FOdKIistGNPMq2jscdqngBRfBpC8uMFVwqjf6gtTVJHIQ/dosgV/CLBU2N4ZJBsXL5savCba9b0YJskKdcA==", + "version": "0.45.2", + "resolved": "https://registry.npmjs.org/drizzle-orm/-/drizzle-orm-0.45.2.tgz", + "integrity": "sha512-kY0BSaTNYWnoDMVoyY8uxmyHjpJW1geOmBMdSSicKo9CIIWkSxMIj2rkeSR51b8KAPB7m+qysjuHme5nKP+E5Q==", "license": "Apache-2.0", "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", @@ -6307,6 +6699,12 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, "node_modules/ejs": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", @@ -6332,6 +6730,15 @@ "integrity": "sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==", "license": "MIT" }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/end-of-stream": { "version": "1.4.5", "license": "MIT", @@ -6354,6 +6761,8 @@ }, "node_modules/env-paths": { "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", 
"dev": true, "license": "MIT", "engines": { @@ -6374,6 +6783,8 @@ }, "node_modules/error-ex": { "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -6464,19 +6875,6 @@ "@esbuild/win32-x64": "0.27.2" } }, - "node_modules/esbuild-register": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.6.0.tgz", - "integrity": "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.3.4" - }, - "peerDependencies": { - "esbuild": ">=0.12 <1" - } - }, "node_modules/escalade": { "version": "3.2.0", "license": "MIT", @@ -6484,6 +6882,12 @@ "node": ">=6" } }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -6507,7 +6911,9 @@ } }, "node_modules/eta": { - "version": "4.5.0", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/eta/-/eta-4.5.1.tgz", + "integrity": "sha512-EaNCGm+8XEIU7YNcc+THptWAO5NfKBHHARxt+wxZljj9bTr/+arRoOm9/MpGt4n6xn9fLnPFRSoLD0WFYGFUxQ==", "license": "MIT", "engines": { "node": ">=20" @@ -6516,6 +6922,15 @@ "url": "https://github.com/bgub/eta?sponsor=1" } }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": 
{ + "node": ">= 0.6" + } + }, "node_modules/event-target-shim": { "version": "5.0.1", "license": "MIT", @@ -6537,6 +6952,27 @@ "bare-events": "^2.7.0" } }, + "node_modules/eventsource": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", + "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", + "license": "MIT", + "dependencies": { + "eventsource-parser": "^3.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/eventsource-parser": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.6.tgz", + "integrity": "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/expect-type": { "version": "1.3.0", "dev": true, @@ -6545,6 +6981,92 @@ "node": ">=12.0.0" } }, + "node_modules/express": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", + "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.1", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "depd": "^2.0.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { 
+ "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-rate-limit": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.3.2.tgz", + "integrity": "sha512-77VmFeJkO0/rvimEDuUC5H30oqUC4EyOhyGccfqoLebB0oiEYfM7nwPrsDsBL1gsTpwfzX8SFy2MT3TDyRq+bg==", + "license": "MIT", + "dependencies": { + "ip-address": "10.1.0" + }, + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://github.com/sponsors/express-rate-limit" + }, + "peerDependencies": { + "express": ">= 4.11" + } + }, + "node_modules/express/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/mime-types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/extend": { "version": "3.0.2", "license": "MIT" @@ -6573,7 +7095,6 @@ }, "node_modules/fast-uri": { "version": "3.1.0", - "dev": true, "funding": [ { "type": "github", @@ -6645,20 +7166,25 @@ "node": ">=10" } }, - "node_modules/find-up": { - "version": "7.0.0", - "dev": true, + "node_modules/finalhandler": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz", + "integrity": "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==", "license": "MIT", "dependencies": { - "locate-path": "^7.2.0", - 
"path-exists": "^5.0.0", - "unicorn-magic": "^0.1.0" + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" }, "engines": { - "node": ">=18" + "node": ">= 18.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/follow-redirects": { @@ -6717,12 +7243,30 @@ "node": ">=12.20.0" } }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/forwarded-parse": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/forwarded-parse/-/forwarded-parse-2.1.2.tgz", "integrity": "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==", "license": "MIT" }, + "node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/fs-constants": { "version": "1.0.0", "license": "MIT" @@ -6844,23 +7388,26 @@ } }, "node_modules/git-raw-commits": { - "version": "4.0.0", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-5.0.1.tgz", + "integrity": "sha512-Y+csSm2GD/PCSh6Isd/WiMjNAydu0VBiG9J7EdQsNA5P9uXvLayqjmTsNlK5Gs9IhblFZqOU0yid5Il5JPoLiQ==", "dev": true, "license": "MIT", "dependencies": { - "dargs": "^8.0.0", - "meow": "^12.0.1", - "split2": "^4.0.0" + "@conventional-changelog/git-client": "^2.6.0", + "meow": "^13.0.0" }, "bin": { - "git-raw-commits": "cli.mjs" + "git-raw-commits": "src/cli.js" }, 
"engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/global-directory": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz", + "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==", "dev": true, "license": "MIT", "dependencies": { @@ -6873,14 +7420,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/global-directory/node_modules/ini": { - "version": "4.1.1", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, "node_modules/google-auth-library": { "version": "10.5.0", "license": "Apache-2.0", @@ -6979,9 +7518,9 @@ } }, "node_modules/hono": { - "version": "4.12.7", - "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.7.tgz", - "integrity": "sha512-jq9l1DM0zVIvsm3lv9Nw9nlJnMNPOcAtsbsgiUhWcFzPE99Gvo6yRTlszSLLYacMeQ6quHD6hMfId8crVHvexw==", + "version": "4.12.9", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.9.tgz", + "integrity": "sha512-wy3T8Zm2bsEvxKZM5w21VdHDDcwVS1yUFFY6i8UobSsKfFceT7TOwhbhfKsDyx7tYQlmRM5FLpIuYvNFyjctiA==", "license": "MIT", "engines": { "node": ">=16.9.0" @@ -7005,6 +7544,26 @@ "dev": true, "license": "MIT" }, + "node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "license": "MIT", + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/http-proxy-agent": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", @@ -7030,6 +7589,22 @@ "node": ">= 
14" } }, + "node_modules/iconv-lite": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/ieee754": { "version": "1.2.1", "funding": [ @@ -7050,6 +7625,8 @@ }, "node_modules/import-fresh": { "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", "dev": true, "license": "MIT", "dependencies": { @@ -7065,6 +7642,8 @@ }, "node_modules/import-fresh/node_modules/resolve-from": { "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true, "license": "MIT", "engines": { @@ -7072,19 +7651,24 @@ } }, "node_modules/import-in-the-middle": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-2.0.6.tgz", - "integrity": "sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-3.0.0.tgz", + "integrity": "sha512-OnGy+eYT7wVejH2XWgLRgbmzujhhVIATQH0ztIeRilwHBjTeG3pD+XnH3PKX0r9gJ0BuJmJ68q/oh9qgXnNDQg==", "license": "Apache-2.0", "dependencies": { "acorn": "^8.15.0", "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^2.2.0", "module-details-from-path": "^1.0.4" + }, + "engines": { + "node": ">=18" } }, "node_modules/import-meta-resolve": { "version": "4.2.0", 
+ "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.2.0.tgz", + "integrity": "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==", "dev": true, "license": "MIT", "funding": { @@ -7105,11 +7689,23 @@ "version": "2.0.4", "license": "ISC" }, + "node_modules/ini": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz", + "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/ioredis": { - "version": "5.8.2", + "version": "5.10.1", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.10.1.tgz", + "integrity": "sha512-HuEDBTI70aYdx1v6U97SbNx9F1+svQKBDo30o0b9fw055LMepzpOOd0Ccg9Q6tbqmBSJaMuY0fB7yw9/vjBYCA==", "license": "MIT", "dependencies": { - "@ioredis/commands": "1.4.0", + "@ioredis/commands": "1.5.1", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", @@ -7127,8 +7723,28 @@ "url": "https://opencollective.com/ioredis" } }, + "node_modules/ip-address": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", + "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, "node_modules/is-arrayish": { "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": 
"sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true, "license": "MIT" }, @@ -7225,12 +7841,27 @@ }, "node_modules/is-obj": { "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", "dev": true, "license": "MIT", "engines": { "node": ">=8" } }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", @@ -7238,6 +7869,12 @@ "dev": true, "license": "MIT" }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" + }, "node_modules/is-stream": { "version": "2.0.1", "license": "MIT", @@ -7248,17 +7885,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-text-path": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "text-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/is-unicode-supported": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", @@ -7368,13 +7994,13 @@ } }, "node_modules/jira.js": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/jira.js/-/jira.js-5.3.0.tgz", 
- "integrity": "sha512-yalIuW4UvIDf31WHvozHwFp/2JGJmLhKGxduZOwXwIlV2mwpm2Pf5kUErDBN9XgB1jZFShINrfA3n5IRs8SCMQ==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/jira.js/-/jira.js-5.3.1.tgz", + "integrity": "sha512-ldeH/vcSMLb95OVITF9Q34aNRwPZwCO8PpsY+ZXpzkbGtSzMx74R1pm7ev/0OrDrsYGXSJ+noMS3LH/LnAyG1w==", "license": "MIT", "dependencies": { - "axios": "^1.13.3", - "mime": "^4.1.0", + "axios": "^1.13.5", + "mime-types": "^2.1.35", "zod": "^4.3.6" }, "engines": { @@ -7397,6 +8023,15 @@ "jiti": "lib/jiti-cli.mjs" } }, + "node_modules/jose": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/jose/-/jose-6.2.2.tgz", + "integrity": "sha512-d7kPDd34KO/YnzaDOlikGpOurfF0ByC2sEV4cANCtdqLlTfBlw2p14O/5d/zv40gJPbIQxfES3nSx1/oYNyuZQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "dev": true, @@ -7487,6 +8122,8 @@ }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true, "license": "MIT" }, @@ -7505,31 +8142,13 @@ }, "node_modules/json-schema-traverse": { "version": "1.0.0", - "dev": true, "license": "MIT" }, - "node_modules/jsonparse": { - "version": "1.3.1", - "dev": true, - "engines": [ - "node >= 0.2.0" - ], - "license": "MIT" - }, - "node_modules/JSONStream": { - "version": "1.3.5", - "dev": true, - "license": "(MIT OR Apache-2.0)", - "dependencies": { - "jsonparse": "^1.2.0", - "through": ">=2.2.7 <3" - }, - "bin": { - "JSONStream": "bin.js" - }, - "engines": { - "node": "*" - } + "node_modules/json-schema-typed": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/json-schema-typed/-/json-schema-typed-8.0.2.tgz", + "integrity": 
"sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", + "license": "BSD-2-Clause" }, "node_modules/jwa": { "version": "2.0.1", @@ -7755,13 +8374,15 @@ }, "node_modules/lines-and-columns": { "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", "dev": true, "license": "MIT" }, "node_modules/llmist": { - "version": "16.1.0", - "resolved": "https://registry.npmjs.org/llmist/-/llmist-16.1.0.tgz", - "integrity": "sha512-saSSxHR8onoD4KbVAz5wuB64SIdC33LRZk1EpukjsS0JSJb6r2Be/0ry55sPY2fiARHN6S7kWdxIJLRNSFISog==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/llmist/-/llmist-16.2.0.tgz", + "integrity": "sha512-/gFsrCDQkLNhMIsnbS7BD3Whazw3UvhzBQAVxwFz7HHBsyzirmSifI/ug9rFfRZLm7kvJI/y4MPrVMlSbJ36nA==", "license": "MIT", "dependencies": { "@anthropic-ai/sdk": "^0.69.0", @@ -7774,6 +8395,7 @@ "js-yaml": "^4.1.0", "marked": "^15.0.12", "marked-terminal": "^7.3.0", + "minimatch": "^9.0.9", "openai": "^6.0.0", "p-retry": "^7.1.1", "tiktoken": "^1.0.22", @@ -7784,6 +8406,21 @@ "node": ">=22.0.0" } }, + "node_modules/llmist/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/llmist/node_modules/zod": { "version": "4.3.6", "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", @@ -7793,20 +8430,6 @@ "url": "https://github.com/sponsors/colinhacks" } }, - "node_modules/locate-path": { - "version": "7.2.0", - "dev": true, - "license": "MIT", - "dependencies": { 
- "p-locate": "^6.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/lodash": { "version": "4.18.1", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", @@ -7825,49 +8448,48 @@ }, "node_modules/lodash.defaults": { "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==", "license": "MIT" }, "node_modules/lodash.isarguments": { "version": "3.1.0", - "license": "MIT" - }, - "node_modules/lodash.isplainobject": { - "version": "4.0.6", - "dev": true, + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==", "license": "MIT" }, "node_modules/lodash.kebabcase": { "version": "4.1.1", - "dev": true, - "license": "MIT" - }, - "node_modules/lodash.merge": { - "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", + "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==", "dev": true, "license": "MIT" }, "node_modules/lodash.mergewith": { "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz", + "integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==", "dev": true, "license": "MIT" }, "node_modules/lodash.snakecase": { "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz", + "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==", "dev": true, "license": "MIT" }, "node_modules/lodash.startcase": { "version": "4.4.0", 
- "dev": true, - "license": "MIT" - }, - "node_modules/lodash.uniq": { - "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz", + "integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==", "dev": true, "license": "MIT" }, "node_modules/lodash.upperfirst": { "version": "4.3.1", + "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz", + "integrity": "sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==", "dev": true, "license": "MIT" }, @@ -7989,9 +8611,9 @@ } }, "node_modules/marklassian": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/marklassian/-/marklassian-1.1.0.tgz", - "integrity": "sha512-aR3o6Ig3GM5+iwZFTKBlAHy2Bcdd7auwra8RLWmcmygVq8AeQQKusdhzJ5s9AL1Xeu85Ax7mpuTmTorvKMByMA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/marklassian/-/marklassian-1.2.1.tgz", + "integrity": "sha512-xqsslmU//fP0yWV7Qbw+Eq250cuYzi7uEOBElykAAM5cU2i30p1CDYjb1gRrkrt3Gg33XaqK0jk6838cRjxKJg==", "license": "MIT", "dependencies": { "marked": "^15.0.6 || ^16.0.0" @@ -8011,30 +8633,38 @@ "dev": true, "license": "CC0-1.0" }, + "node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/meow": { - "version": "12.1.1", + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", "dev": true, "license": "MIT", "engines": { - "node": ">=16.10" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/mime": { - "version": 
"4.1.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-4.1.0.tgz", - "integrity": "sha512-X5ju04+cAzsojXKes0B/S4tcYtFAJ6tTMuSPBEn9CPGlrWr8Fiw7qYeLT0XyH80HSoAoqWCaz+MWKh22P7G1cw==", - "funding": [ - "https://github.com/sponsors/broofa" - ], + "node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", "license": "MIT", - "bin": { - "mime": "bin/cli.js" - }, "engines": { - "node": ">=16" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/mime-db": { @@ -8066,8 +8696,25 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/minimatch": { + "version": "10.2.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz", + "integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==", + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.5" + }, + "engines": { + "node": "18 || 20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/minimist": { "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "dev": true, "license": "MIT", "funding": { @@ -8134,7 +8781,9 @@ } }, "node_modules/nan": { - "version": "2.24.0", + "version": "2.26.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.26.2.tgz", + "integrity": "sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==", "license": "MIT", "optional": true }, @@ -8157,6 +8806,15 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/negotiator": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/node-abort-controller": { "version": "3.1.1", "license": "MIT" @@ -8258,6 +8916,30 @@ "node": ">=0.10.0" } }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/once": { "version": "1.4.0", "license": "ISC", @@ -8386,34 +9068,6 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/p-limit": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "6.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/p-retry": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-7.1.1.tgz", @@ -8435,6 +9089,8 @@ }, "node_modules/parent-module": { "version": "1.0.1", 
+ "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "dev": true, "license": "MIT", "dependencies": { @@ -8446,6 +9102,8 @@ }, "node_modules/parse-json": { "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "dev": true, "license": "MIT", "dependencies": { @@ -8482,12 +9140,13 @@ "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "license": "MIT" }, - "node_modules/path-exists": { - "version": "5.0.0", - "dev": true, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", "license": "MIT", "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">= 0.8" } }, "node_modules/path-key": { @@ -8497,6 +9156,16 @@ "node": ">=8" } }, + "node_modules/path-to-regexp": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.4.2.tgz", + "integrity": "sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/pathe": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", @@ -8515,14 +9184,14 @@ } }, "node_modules/pg": { - "version": "8.18.0", - "resolved": "https://registry.npmjs.org/pg/-/pg-8.18.0.tgz", - "integrity": "sha512-xqrUDL1b9MbkydY/s+VZ6v+xiMUmOUk7SS9d/1kpyQxoJ6U9AO1oIJyUWVZojbfe5Cc/oluutcgFG4L9RDP1iQ==", + "version": "8.20.0", + "resolved": 
"https://registry.npmjs.org/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==", "license": "MIT", "dependencies": { - "pg-connection-string": "^2.11.0", - "pg-pool": "^3.11.0", - "pg-protocol": "^1.11.0", + "pg-connection-string": "^2.12.0", + "pg-pool": "^3.13.0", + "pg-protocol": "^1.13.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, @@ -8549,9 +9218,9 @@ "optional": true }, "node_modules/pg-connection-string": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.11.0.tgz", - "integrity": "sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==", + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz", + "integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==", "license": "MIT" }, "node_modules/pg-int8": { @@ -8564,18 +9233,18 @@ } }, "node_modules/pg-pool": { - "version": "3.11.0", - "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.11.0.tgz", - "integrity": "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==", + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz", + "integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==", "license": "MIT", "peerDependencies": { "pg": ">=8.0" } }, "node_modules/pg-protocol": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.11.0.tgz", - "integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz", + "integrity": 
"sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==", "license": "MIT" }, "node_modules/pg-types": { @@ -8619,6 +9288,15 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pkce-challenge": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.1.tgz", + "integrity": "sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==", + "license": "MIT", + "engines": { + "node": ">=16.20.0" + } + }, "node_modules/postcss": { "version": "8.5.8", "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", @@ -8770,6 +9448,19 @@ "node": ">=12.0.0" } }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/proxy-from-env": { "version": "1.1.0", "license": "MIT" @@ -8792,6 +9483,45 @@ "node": ">=6" } }, + "node_modules/qs": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.15.0.tgz", + "integrity": "sha512-mAZTtNCeetKMH+pSjrb76NAM8V9a05I9aBZOHztWy/UqcJdQYNsf59vrRKWnojAT9Y+GbIvoTBC++CPHqpDBhQ==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/raw-body/-/raw-body-3.0.2.tgz", + "integrity": "sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==", + "license": "MIT", + "dependencies": { + "bytes": "~3.1.2", + "http-errors": "~2.0.1", + "iconv-lite": "~0.7.0", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/react": { "version": "19.2.4", "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", @@ -8855,6 +9585,8 @@ }, "node_modules/redis-errors": { "version": "1.2.0", + "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", + "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==", "license": "MIT", "engines": { "node": ">=4" @@ -8862,6 +9594,8 @@ }, "node_modules/redis-parser": { "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", + "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", "license": "MIT", "dependencies": { "redis-errors": "^1.0.0" @@ -8879,7 +9613,6 @@ }, "node_modules/require-from-string": { "version": "2.0.2", - "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -8900,6 +9633,8 @@ }, "node_modules/resolve-from": { "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true, "license": "MIT", "engines": { @@ -9048,6 +9783,22 @@ "fsevents": "~2.3.2" } }, + "node_modules/router": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", + "is-promise": "^4.0.0", + "parseurl": "^1.3.3", + 
"path-to-regexp": "^8.0.0" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/run-applescript": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.1.0.tgz", @@ -9090,6 +9841,8 @@ }, "node_modules/safer-buffer": { "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "license": "MIT" }, "node_modules/saxes": { @@ -9113,7 +9866,9 @@ "license": "MIT" }, "node_modules/semver": { - "version": "7.7.3", + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -9122,6 +9877,82 @@ "node": ">=10" } }, + "node_modules/send": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.1.tgz", + "integrity": "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.3", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.1", + "mime-types": "^3.0.2", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/send/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/send/node_modules/mime-types": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/serve-static": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz", + "integrity": "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, "node_modules/shebang-command": { "version": "2.0.0", "license": "MIT", @@ -9152,6 +9983,78 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/siginfo": { "version": "2.0.0", "dev": true, @@ -9210,6 +10113,8 @@ }, "node_modules/split-ca": { "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", + "integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==", "license": "ISC" }, "node_modules/split2": { @@ -9221,6 +10126,8 @@ }, "node_modules/ssh2": { "version": "1.17.0", + "resolved": 
"https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", "hasInstallScript": true, "dependencies": { "asn1": "^0.2.6", @@ -9241,8 +10148,19 @@ }, "node_modules/standard-as-callback": { "version": "2.1.0", + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", + "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==", "license": "MIT" }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/std-env": { "version": "3.10.0", "dev": true, @@ -9488,17 +10406,6 @@ "b4a": "^1.6.4" } }, - "node_modules/text-extensions": { - "version": "2.4.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/thenify": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", @@ -9520,11 +10427,6 @@ "node": ">=0.8" } }, - "node_modules/through": { - "version": "2.3.8", - "dev": true, - "license": "MIT" - }, "node_modules/tiktoken": { "version": "1.0.22", "resolved": "https://registry.npmjs.org/tiktoken/-/tiktoken-1.0.22.tgz", @@ -9605,6 +10507,15 @@ "dev": true, "license": "MIT" }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, "node_modules/tough-cookie": { "version": "6.0.0", "resolved": 
"https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz", @@ -9692,6 +10603,8 @@ }, "node_modules/tweetnacl": { "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", "license": "Unlicense" }, "node_modules/type-fest": { @@ -9706,6 +10619,45 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-types": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz", + "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/typescript": { "version": "5.9.3", "dev": true, @@ -9741,21 +10693,19 @@ "node": ">=4" } }, - "node_modules/unicorn-magic": { - "version": "0.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, 
"node_modules/universal-user-agent": { "version": "7.0.3", "license": "ISC" }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "license": "MIT" @@ -9771,6 +10721,15 @@ "uuid": "dist/esm/bin/uuid" } }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/vite": { "version": "7.3.1", "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", @@ -10199,17 +11158,6 @@ "node": ">=10" } }, - "node_modules/yocto-queue": { - "version": "1.2.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/yoctocolors": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz", @@ -10287,6 +11235,15 @@ "funding": { "url": "https://github.com/sponsors/colinhacks" } + }, + "node_modules/zod-to-json-schema": { + "version": "3.25.2", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.25.2.tgz", + "integrity": "sha512-O/PgfnpT1xKSDeQYSCfRI5Gy3hPf91mKVDuYLUHZJMiDFptvP41MSnWofm8dnCm0256ZNfZIM7DSzuSMAFnjHA==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.25.28 || ^4" + } } } } diff --git a/web/package-lock.json b/web/package-lock.json index 5f89e557..268b547a 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -15,7 +15,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", - "lucide-react": "^0.475.0", + 
"lucide-react": "^0.577.0", "next-themes": "^0.4.6", "radix-ui": "^1.4.3", "react": "^19.1.0", @@ -2766,49 +2766,49 @@ "license": "MIT" }, "node_modules/@tailwindcss/node": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.18.tgz", - "integrity": "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.2.tgz", + "integrity": "sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==", "dev": true, "license": "MIT", "dependencies": { - "@jridgewell/remapping": "^2.3.4", - "enhanced-resolve": "^5.18.3", + "@jridgewell/remapping": "^2.3.5", + "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", - "lightningcss": "1.30.2", + "lightningcss": "1.32.0", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", - "tailwindcss": "4.1.18" + "tailwindcss": "4.2.2" } }, "node_modules/@tailwindcss/oxide": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.18.tgz", - "integrity": "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.2.tgz", + "integrity": "sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==", "dev": true, "license": "MIT", "engines": { - "node": ">= 10" + "node": ">= 20" }, "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.1.18", - "@tailwindcss/oxide-darwin-arm64": "4.1.18", - "@tailwindcss/oxide-darwin-x64": "4.1.18", - "@tailwindcss/oxide-freebsd-x64": "4.1.18", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", - "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", - "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", - "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", - "@tailwindcss/oxide-linux-x64-musl": 
"4.1.18", - "@tailwindcss/oxide-wasm32-wasi": "4.1.18", - "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", - "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" + "@tailwindcss/oxide-android-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-x64": "4.2.2", + "@tailwindcss/oxide-freebsd-x64": "4.2.2", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.2", + "@tailwindcss/oxide-linux-arm64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-arm64-musl": "4.2.2", + "@tailwindcss/oxide-linux-x64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-x64-musl": "4.2.2", + "@tailwindcss/oxide-wasm32-wasi": "4.2.2", + "@tailwindcss/oxide-win32-arm64-msvc": "4.2.2", + "@tailwindcss/oxide-win32-x64-msvc": "4.2.2" } }, "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.18.tgz", - "integrity": "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.2.tgz", + "integrity": "sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==", "cpu": [ "arm64" ], @@ -2819,13 +2819,13 @@ "android" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.18.tgz", - "integrity": "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.2.tgz", + "integrity": "sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==", "cpu": [ "arm64" ], @@ -2836,13 +2836,13 @@ "darwin" ], "engines": { - 
"node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.18.tgz", - "integrity": "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.2.tgz", + "integrity": "sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==", "cpu": [ "x64" ], @@ -2853,13 +2853,13 @@ "darwin" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.18.tgz", - "integrity": "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.2.tgz", + "integrity": "sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==", "cpu": [ "x64" ], @@ -2870,13 +2870,13 @@ "freebsd" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.18.tgz", - "integrity": "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.2.tgz", + "integrity": "sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==", "cpu": [ "arm" ], @@ -2887,13 +2887,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, 
"node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.18.tgz", - "integrity": "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.2.tgz", + "integrity": "sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==", "cpu": [ "arm64" ], @@ -2904,13 +2904,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.18.tgz", - "integrity": "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.2.tgz", + "integrity": "sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==", "cpu": [ "arm64" ], @@ -2921,13 +2921,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.18.tgz", - "integrity": "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.2.tgz", + "integrity": "sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==", "cpu": [ "x64" ], @@ -2938,13 +2938,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, 
"node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.18.tgz", - "integrity": "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.2.tgz", + "integrity": "sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==", "cpu": [ "x64" ], @@ -2955,13 +2955,13 @@ "linux" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.18.tgz", - "integrity": "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.2.tgz", + "integrity": "sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==", "bundleDependencies": [ "@napi-rs/wasm-runtime", "@emnapi/core", @@ -2977,21 +2977,21 @@ "license": "MIT", "optional": true, "dependencies": { - "@emnapi/core": "^1.7.1", - "@emnapi/runtime": "^1.7.1", + "@emnapi/core": "^1.8.1", + "@emnapi/runtime": "^1.8.1", "@emnapi/wasi-threads": "^1.1.0", - "@napi-rs/wasm-runtime": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.1", "@tybys/wasm-util": "^0.10.1", - "tslib": "^2.4.0" + "tslib": "^2.8.1" }, "engines": { "node": ">=14.0.0" } }, "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.18.tgz", - "integrity": "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA==", + "version": "4.2.2", + 
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.2.tgz", + "integrity": "sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==", "cpu": [ "arm64" ], @@ -3002,13 +3002,13 @@ "win32" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.18.tgz", - "integrity": "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.2.tgz", + "integrity": "sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==", "cpu": [ "x64" ], @@ -3019,7 +3019,7 @@ "win32" ], "engines": { - "node": ">= 10" + "node": ">= 20" } }, "node_modules/@tailwindcss/typography": { @@ -3035,27 +3035,27 @@ } }, "node_modules/@tailwindcss/vite": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.18.tgz", - "integrity": "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.2.2.tgz", + "integrity": "sha512-mEiF5HO1QqCLXoNEfXVA1Tzo+cYsrqV7w9Juj2wdUFyW07JRenqMG225MvPwr3ZD9N1bFQj46X7r33iHxLUW0w==", "dev": true, "license": "MIT", "dependencies": { - "@tailwindcss/node": "4.1.18", - "@tailwindcss/oxide": "4.1.18", - "tailwindcss": "4.1.18" + "@tailwindcss/node": "4.2.2", + "@tailwindcss/oxide": "4.2.2", + "tailwindcss": "4.2.2" }, "peerDependencies": { - "vite": "^5.2.0 || ^6 || ^7" + "vite": "^5.2.0 || ^6 || ^7 || ^8" } }, "node_modules/@tanstack/history": { - "version": "1.154.14", - "resolved": 
"https://registry.npmjs.org/@tanstack/history/-/history-1.154.14.tgz", - "integrity": "sha512-xyIfof8eHBuub1CkBnbKNKQXeRZC4dClhmzePHVOEel4G7lk/dW+TQ16da7CFdeNLv6u6Owf5VoBQxoo6DFTSA==", + "version": "1.161.6", + "resolved": "https://registry.npmjs.org/@tanstack/history/-/history-1.161.6.tgz", + "integrity": "sha512-NaOGLRrddszbQj9upGat6HG/4TKvXLvu+osAIgfxPYA+eIvYKv8GKDJOrY2D3/U9MRnKfMWD7bU4jeD4xmqyIg==", "license": "MIT", "engines": { - "node": ">=12" + "node": ">=20.19" }, "funding": { "type": "github", @@ -3063,9 +3063,9 @@ } }, "node_modules/@tanstack/query-core": { - "version": "5.90.20", - "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.20.tgz", - "integrity": "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==", + "version": "5.96.1", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.96.1.tgz", + "integrity": "sha512-u1yBgtavSy+N8wgtW3PiER6UpxcplMje65yXnnVgiHTqiMwLlxiw4WvQDrXyn+UD6lnn8kHaxmerJUzQcV/MMg==", "license": "MIT", "funding": { "type": "github", @@ -3073,12 +3073,12 @@ } }, "node_modules/@tanstack/react-query": { - "version": "5.90.21", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.21.tgz", - "integrity": "sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==", + "version": "5.96.1", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.96.1.tgz", + "integrity": "sha512-2X7KYK5KKWUKGeWCVcqxXAkYefJtrKB7tSKWgeG++b0H6BRHxQaLSSi8AxcgjmUnnosHuh9WsFZqvE16P1WCzA==", "license": "MIT", "dependencies": { - "@tanstack/query-core": "5.90.20" + "@tanstack/query-core": "5.96.1" }, "funding": { "type": "github", @@ -3089,20 +3089,18 @@ } }, "node_modules/@tanstack/react-router": { - "version": "1.160.0", - "resolved": "https://registry.npmjs.org/@tanstack/react-router/-/react-router-1.160.0.tgz", - "integrity": 
"sha512-leT/nymh9rKFVivy4b/F8/PZiMrLpotNiyemNg0/KjdZNzo5oVEdFnsXVFnBI1lL4WXRbiq7RK8+fI0SKsT6ww==", + "version": "1.168.10", + "resolved": "https://registry.npmjs.org/@tanstack/react-router/-/react-router-1.168.10.tgz", + "integrity": "sha512-/RmDlOwDkCug609KdPB3U+U1zmrtadJpvsmRg2zEn8TRCKRNri7dYZIjQZbNg8PgUiRL4T6njrZBV1ChzblNaA==", "license": "MIT", "dependencies": { - "@tanstack/history": "1.154.14", - "@tanstack/react-store": "^0.8.0", - "@tanstack/router-core": "1.160.0", - "isbot": "^5.1.22", - "tiny-invariant": "^1.3.3", - "tiny-warning": "^1.0.3" + "@tanstack/history": "1.161.6", + "@tanstack/react-store": "^0.9.3", + "@tanstack/router-core": "1.168.9", + "isbot": "^5.1.22" }, "engines": { - "node": ">=12" + "node": ">=20.19" }, "funding": { "type": "github", @@ -3114,12 +3112,12 @@ } }, "node_modules/@tanstack/react-store": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@tanstack/react-store/-/react-store-0.8.1.tgz", - "integrity": "sha512-XItJt+rG8c5Wn/2L/bnxys85rBpm0BfMbhb4zmPVLXAKY9POrp1xd6IbU4PKoOI+jSEGc3vntPRfLGSgXfE2Ig==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@tanstack/react-store/-/react-store-0.9.3.tgz", + "integrity": "sha512-y2iHd/N9OkoQbFJLUX1T9vbc2O9tjH0pQRgTcx1/Nz4IlwLvkgpuglXUx+mXt0g5ZDFrEeDnONPqkbfxXJKwRg==", "license": "MIT", "dependencies": { - "@tanstack/store": "0.8.1", + "@tanstack/store": "0.9.3", "use-sync-external-store": "^1.6.0" }, "funding": { @@ -3132,21 +3130,21 @@ } }, "node_modules/@tanstack/router-core": { - "version": "1.160.0", - "resolved": "https://registry.npmjs.org/@tanstack/router-core/-/router-core-1.160.0.tgz", - "integrity": "sha512-vbh6OsE0MG+0c+SKh2uk5yEEZlWsxT96Ub2JaTs7ixOvZp3Wu9PTEIe2BA3cShNZhEsDI0Le4NqgY4XIaHLLvA==", + "version": "1.168.9", + "resolved": "https://registry.npmjs.org/@tanstack/router-core/-/router-core-1.168.9.tgz", + "integrity": "sha512-18oeEwEDyXOIuO1VBP9ACaK7tYHZUjynGDCoUh/5c/BNhia9vCJCp9O0LfhZXOorDc/PmLSgvmweFhVmIxF10g==", "license": "MIT", 
"dependencies": { - "@tanstack/history": "1.154.14", - "@tanstack/store": "^0.8.0", + "@tanstack/history": "1.161.6", "cookie-es": "^2.0.0", "seroval": "^1.4.2", - "seroval-plugins": "^1.4.2", - "tiny-invariant": "^1.3.3", - "tiny-warning": "^1.0.3" + "seroval-plugins": "^1.4.2" + }, + "bin": { + "intent": "bin/intent.js" }, "engines": { - "node": ">=12" + "node": ">=20.19" }, "funding": { "type": "github", @@ -3154,9 +3152,9 @@ } }, "node_modules/@tanstack/store": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@tanstack/store/-/store-0.8.1.tgz", - "integrity": "sha512-PtOisLjUZPz5VyPRSCGjNOlwTvabdTBQ2K80DpVL1chGVr35WRxfeavAPdNq6pm/t7F8GhoR2qtmkkqtCEtHYw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@tanstack/store/-/store-0.9.3.tgz", + "integrity": "sha512-8reSzl/qGWGGVKhBoxXPMWzATSbZLZFWhwBAFO9NAyp0TxzfBP0mIrGb8CP8KrQTmvzXlR/vFPPUrHTLBGyFyw==", "license": "MIT", "funding": { "type": "github", @@ -3164,43 +3162,52 @@ } }, "node_modules/@trpc/client": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/@trpc/client/-/client-11.10.0.tgz", - "integrity": "sha512-h0s2AwDtuhS8INRb4hlo4z3RKCkarWqlOy+3ffJgrlDxzzW6aLUN+9nDrcN4huPje1Em15tbCOqhIc6oaKYTRw==", + "version": "11.16.0", + "resolved": "https://registry.npmjs.org/@trpc/client/-/client-11.16.0.tgz", + "integrity": "sha512-TxIzm7OoK3baKZ0XCbuMUbI3GhgjcbKHIc4nWVKaRpCRnbSh0T31BT6fTPYwtnA/Nur8pBCGqC2B4J5hEPiPFQ==", "funding": [ "https://trpc.io/sponsor" ], "license": "MIT", + "bin": { + "intent": "bin/intent.js" + }, "peerDependencies": { - "@trpc/server": "11.10.0", + "@trpc/server": "11.16.0", "typescript": ">=5.7.2" } }, "node_modules/@trpc/server": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/@trpc/server/-/server-11.10.0.tgz", - "integrity": "sha512-zZjTrR6He61e5TiT7e/bQqab/jRcXBZM8Fg78Yoo8uh5pz60dzzbYuONNUCOkafv5ppXVMms4NHYfNZgzw50vg==", + "version": "11.16.0", + "resolved": 
"https://registry.npmjs.org/@trpc/server/-/server-11.16.0.tgz", + "integrity": "sha512-XgGuUMddrUTd04+za/WE5GFuZ1/YU9XQG0t3VL5WOIu2JspkOlq6k4RYEiqS6HSJt+S0RXaPdIoE2anIP/BBRQ==", "funding": [ "https://trpc.io/sponsor" ], "license": "MIT", "peer": true, + "bin": { + "intent": "bin/intent.js" + }, "peerDependencies": { "typescript": ">=5.7.2" } }, "node_modules/@trpc/tanstack-react-query": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/@trpc/tanstack-react-query/-/tanstack-react-query-11.10.0.tgz", - "integrity": "sha512-fXkkhH6UDFAFMwlXePkgbmUAiDgflpbWx4EbzRANKFzMtxyFrBjbSHQrAPrm4ZLjZdJcIeHK0oAIDPhiOh4VYg==", + "version": "11.16.0", + "resolved": "https://registry.npmjs.org/@trpc/tanstack-react-query/-/tanstack-react-query-11.16.0.tgz", + "integrity": "sha512-kfNYJ5NCk67tmRCO/QWCycJmRamuEKmZf1HRG8yCBl8aTJdTwVq7l6AffFIMab3Q+NmQzwP8XXkAEK/EelKYOA==", "funding": [ "https://trpc.io/sponsor" ], "license": "MIT", + "bin": { + "intent": "bin/intent.js" + }, "peerDependencies": { "@tanstack/react-query": "^5.80.3", - "@trpc/client": "11.10.0", - "@trpc/server": "11.10.0", + "@trpc/client": "11.16.0", + "@trpc/server": "11.16.0", "react": ">=18.2.0", "typescript": ">=5.7.2" } @@ -3611,9 +3618,9 @@ "license": "MIT" }, "node_modules/cookie-es": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/cookie-es/-/cookie-es-2.0.0.tgz", - "integrity": "sha512-RAj4E421UYRgqokKUmotqAwuplYw15qtdXfY+hGzgCJ/MBjCVZcSoHK/kH9kocfjRjcDME7IiDWR/1WX1TM2Pg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/cookie-es/-/cookie-es-2.0.1.tgz", + "integrity": "sha512-aVf4A4hI2w70LnF7GG+7xDQUkliwiXWXFvTjkip4+b64ygDQ2sJPRSKFDHbxn8o0xu9QzPkMuuiWIXyFSE2slA==", "license": "MIT" }, "node_modules/cssesc": { @@ -3837,9 +3844,9 @@ "license": "ISC" }, "node_modules/enhanced-resolve": { - "version": "5.19.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz", - "integrity": 
"sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==", + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.1.tgz", + "integrity": "sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==", "dev": true, "license": "MIT", "dependencies": { @@ -4177,9 +4184,9 @@ } }, "node_modules/lightningcss": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", - "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", "dev": true, "license": "MPL-2.0", "dependencies": { @@ -4193,23 +4200,23 @@ "url": "https://opencollective.com/parcel" }, "optionalDependencies": { - "lightningcss-android-arm64": "1.30.2", - "lightningcss-darwin-arm64": "1.30.2", - "lightningcss-darwin-x64": "1.30.2", - "lightningcss-freebsd-x64": "1.30.2", - "lightningcss-linux-arm-gnueabihf": "1.30.2", - "lightningcss-linux-arm64-gnu": "1.30.2", - "lightningcss-linux-arm64-musl": "1.30.2", - "lightningcss-linux-x64-gnu": "1.30.2", - "lightningcss-linux-x64-musl": "1.30.2", - "lightningcss-win32-arm64-msvc": "1.30.2", - "lightningcss-win32-x64-msvc": "1.30.2" + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" } }, 
"node_modules/lightningcss-android-arm64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", - "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": "sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", "cpu": [ "arm64" ], @@ -4228,9 +4235,9 @@ } }, "node_modules/lightningcss-darwin-arm64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", - "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", "cpu": [ "arm64" ], @@ -4249,9 +4256,9 @@ } }, "node_modules/lightningcss-darwin-x64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", - "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", "cpu": [ "x64" ], @@ -4270,9 +4277,9 @@ } }, "node_modules/lightningcss-freebsd-x64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", - "integrity": 
"sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", "cpu": [ "x64" ], @@ -4291,9 +4298,9 @@ } }, "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", - "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", "cpu": [ "arm" ], @@ -4312,9 +4319,9 @@ } }, "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", - "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", "cpu": [ "arm64" ], @@ -4333,9 +4340,9 @@ } }, "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", - "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", + "version": "1.32.0", + "resolved": 
"https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", "cpu": [ "arm64" ], @@ -4354,9 +4361,9 @@ } }, "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", - "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", "cpu": [ "x64" ], @@ -4375,9 +4382,9 @@ } }, "node_modules/lightningcss-linux-x64-musl": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", - "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", "cpu": [ "x64" ], @@ -4396,9 +4403,9 @@ } }, "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", - "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", 
"cpu": [ "arm64" ], @@ -4417,9 +4424,9 @@ } }, "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", - "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", "cpu": [ "x64" ], @@ -4458,9 +4465,9 @@ } }, "node_modules/lucide-react": { - "version": "0.475.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.475.0.tgz", - "integrity": "sha512-NJzvVu1HwFVeZ+Gwq2q00KygM1aBhy/ZrhY9FsAgJtpB+E4R7uxRk9M2iKvHa6/vNxZydIB59htha4c2vvwvVg==", + "version": "0.577.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.577.0.tgz", + "integrity": "sha512-4LjoFv2eEPwYDPg/CUdBJQSDfPyzXCRrVW1X7jrx/trgxnxkHFjnVZINbzvzxjN70dxychOfg+FTYwBiS3pQ5A==", "license": "ISC", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" @@ -5309,9 +5316,9 @@ } }, "node_modules/react-hook-form": { - "version": "7.71.1", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.71.1.tgz", - "integrity": "sha512-9SUJKCGKo8HUSsCO+y0CtqkqI5nNuaDqTxyqPsZPqIwudpj4rCrAz/jZV+jn57bx5gtZKOh3neQu94DXMc+w5w==", + "version": "7.72.0", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.72.0.tgz", + "integrity": "sha512-V4v6jubaf6JAurEaVnT9aUPKFbNtDgohj5CIgVGyPHvT9wRx5OZHVjz31GsxnPNI278XMu+ruFz+wGOscHaLKw==", "license": "MIT", "engines": { "node": ">=18.0.0" @@ -5461,9 +5468,9 @@ } }, "node_modules/recharts": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.8.0.tgz", - "integrity": 
"sha512-Z/m38DX3L73ExO4Tpc9/iZWHmHnlzWG4njQbxsF5aSjwqmHNDDIm0rdEBArkwsBvR8U6EirlEHiQNYWCVh9sGQ==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.8.1.tgz", + "integrity": "sha512-mwzmO1s9sFL0TduUpwndxCUNoXsBw3u3E/0+A+cLcrSfQitSG62L32N69GhqUrrT5qKcAE3pCGVINC6pqkBBQg==", "license": "MIT", "workspaces": [ "www" @@ -5606,18 +5613,18 @@ } }, "node_modules/seroval": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.5.0.tgz", - "integrity": "sha512-OE4cvmJ1uSPrKorFIH9/w/Qwuvi/IMcGbv5RKgcJ/zjA/IohDLU6SVaxFN9FwajbP7nsX0dQqMDes1whk3y+yw==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/seroval/-/seroval-1.5.1.tgz", + "integrity": "sha512-OwrZRZAfhHww0WEnKHDY8OM0U/Qs8OTfIDWhUD4BLpNJUfXK4cGmjiagGze086m+mhI+V2nD0gfbHEnJjb9STA==", "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/seroval-plugins": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.5.0.tgz", - "integrity": "sha512-EAHqADIQondwRZIdeW2I636zgsODzoBDwb3PT/+7TLDWyw1Dy/Xv7iGUIEXXav7usHDE9HVhOU61irI3EnyyHA==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/seroval-plugins/-/seroval-plugins-1.5.1.tgz", + "integrity": "sha512-4FbuZ/TMl02sqv0RTFexu0SP6V+ywaIe5bAWCCEik0fk17BhALgwvUDVF7e3Uvf9pxmwCEJsRPmlkUE6HdzLAw==", "license": "MIT", "engines": { "node": ">=10" @@ -5689,9 +5696,9 @@ } }, "node_modules/tailwind-merge": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.4.1.tgz", - "integrity": "sha512-2OA0rFqWOkITEAOFWSBSApYkDeH9t2B3XSJuI4YztKBzK3mX0737A2qtxDZ7xkw9Zfh0bWl+r34sF3HXV+Ig7Q==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.5.0.tgz", + "integrity": "sha512-I8K9wewnVDkL1NTGoqWmVEIlUcB9gFriAEkXkfCjX5ib8ezGxtR3xD7iZIxrfArjEsH7F1CHD4RFUtxefdqV/A==", "license": "MIT", "funding": { "type": "github", @@ -5699,15 +5706,15 @@ } }, 
"node_modules/tailwindcss": { - "version": "4.1.18", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", - "integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.2.tgz", + "integrity": "sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==", "license": "MIT" }, "node_modules/tapable": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.2.tgz", + "integrity": "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==", "dev": true, "license": "MIT", "engines": { @@ -5724,12 +5731,6 @@ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, - "node_modules/tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", - "license": "MIT" - }, "node_modules/tinyglobby": { "version": "0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", diff --git a/web/package.json b/web/package.json index b31429fa..bf9f0bc4 100644 --- a/web/package.json +++ b/web/package.json @@ -17,7 +17,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "cmdk": "^1.1.1", - "lucide-react": "^0.475.0", + "lucide-react": "^0.577.0", "next-themes": "^0.4.6", "radix-ui": "^1.4.3", "react": "^19.1.0", From 47aaeb977f920f23f0f539eda714378e55d87c3f Mon Sep 17 00:00:00 2001 From: aaight Date: Fri, 3 Apr 2026 16:28:57 +0200 
Subject: [PATCH 09/52] refactor(integrations): migrate integration checker to registry, delete legacy functions (#1077) Co-authored-by: Cascade Bot --- docs/architecture/04-agent-system.md | 2 +- src/agents/capabilities/resolver.ts | 37 ++-- src/github/integration.ts | 37 ---- src/github/scm-integration.ts | 11 +- src/pm/index.ts | 1 - src/pm/integration.ts | 28 --- src/sentry/alerting-integration.ts | 20 +-- src/sentry/integration.ts | 9 - .../integration-validation.test.ts | 30 +++- .../unit/agents/capabilities/resolver.test.ts | 110 ++++++++++++ tests/unit/github/integration.test.ts | 158 ----------------- tests/unit/pm/integration.test.ts | 160 ------------------ .../unit/sentry/alerting-integration.test.ts | 20 +-- tests/unit/sentry/integration.test.ts | 63 +------ 14 files changed, 179 insertions(+), 507 deletions(-) delete mode 100644 src/github/integration.ts delete mode 100644 tests/unit/github/integration.test.ts delete mode 100644 tests/unit/pm/integration.test.ts diff --git a/docs/architecture/04-agent-system.md b/docs/architecture/04-agent-system.md index eb583a7a..e385c82e 100644 --- a/docs/architecture/04-agent-system.md +++ b/docs/architecture/04-agent-system.md @@ -162,7 +162,7 @@ interface CapabilityDefinition { ```mermaid flowchart TD A["Agent definition
(capabilities.required + optional)"] --> B[Create integration checker] - B --> C["Check hasPmIntegration(),
hasScmIntegration(),
hasAlertingIntegration()"] + B --> C["integrationRegistry.getByCategory(cat)
.hasIntegration(projectId)
for pm, scm, alerting"] C --> D[resolveEffectiveCapabilities] D --> E["Built-in caps: always included"] D --> F["Integration caps: only if provider configured"] diff --git a/src/agents/capabilities/resolver.ts b/src/agents/capabilities/resolver.ts index c22e34a2..9cb5618f 100644 --- a/src/agents/capabilities/resolver.ts +++ b/src/agents/capabilities/resolver.ts @@ -44,6 +44,7 @@ import { Tmux } from '../../gadgets/tmux.js'; import { TodoDelete, TodoUpdateStatus, TodoUpsert } from '../../gadgets/todo/index.js'; import { VerifyChanges } from '../../gadgets/VerifyChanges.js'; import { WriteFile } from '../../gadgets/WriteFile.js'; +import { integrationRegistry } from '../../integrations/registry.js'; import type { ToolManifest } from '../contracts/index.js'; import type { IntegrationCategory } from '../definitions/schema.js'; import { @@ -378,28 +379,30 @@ export function generateUnavailableCapabilitiesNote(unavailableCaps: Capability[ * * This function pre-fetches integration availability for all categories * and returns a synchronous checker callback. + * + * Uses integrationRegistry.getByCategory() to check all registered integrations + * for each category — returns true if any integration in that category is configured. 
*/ export async function createIntegrationChecker(projectId: string): Promise { - // Import integration checking functions dynamically to avoid circular deps - const [{ hasPmIntegration }, { hasScmIntegration }, { hasAlertingIntegration }] = - await Promise.all([ - import('../../pm/integration.js'), - import('../../github/integration.js'), - import('../../sentry/integration.js'), - ]); - - // Pre-fetch all integration statuses in parallel - const [hasPm, hasScm, hasAlerting] = await Promise.all([ - hasPmIntegration(projectId), - hasScmIntegration(projectId), - hasAlertingIntegration(projectId), - ]); + const categories: IntegrationCategory[] = ['pm', 'scm', 'alerting']; + + // Pre-fetch all integration statuses in parallel across all categories + const results = await Promise.all( + categories.map(async (cat) => { + const integrations = integrationRegistry.getByCategory(cat); + // Category is available if ANY registered integration for it is configured + const statuses = await Promise.all( + integrations.map((integration) => integration.hasIntegration(projectId)), + ); + return statuses.some(Boolean); + }), + ); // Return synchronous checker const availableIntegrations: Record = { - pm: hasPm, - scm: hasScm, - alerting: hasAlerting, + pm: results[0], + scm: results[1], + alerting: results[2], }; return (category: IntegrationCategory) => availableIntegrations[category] ?? false; diff --git a/src/github/integration.ts b/src/github/integration.ts deleted file mode 100644 index 9eccd21a..00000000 --- a/src/github/integration.ts +++ /dev/null @@ -1,37 +0,0 @@ -/** - * SCM (GitHub) integration — credential validation helpers. - * - * Provides hasScmIntegration() for checking if SCM integration is configured. - */ - -import { getIntegrationCredentialOrNull } from '../config/provider.js'; -import { getIntegrationProvider } from '../db/repositories/credentialsRepository.js'; - -/** - * Check if SCM integration is configured for a project. 
- * Returns true if the integration exists and has at least one token linked. - */ -export async function hasScmIntegration(projectId: string): Promise { - const provider = await getIntegrationProvider(projectId, 'scm'); - if (!provider) return false; - - // Check if either token is available (some agents only need one) - const [impl, rev] = await Promise.all([ - getIntegrationCredentialOrNull(projectId, 'scm', 'implementer_token'), - getIntegrationCredentialOrNull(projectId, 'scm', 'reviewer_token'), - ]); - - return impl !== null || rev !== null; -} - -/** - * Check if a specific SCM persona token is configured. - */ -export async function hasScmPersonaToken( - projectId: string, - persona: 'implementer' | 'reviewer', -): Promise { - const role = persona === 'implementer' ? 'implementer_token' : 'reviewer_token'; - const token = await getIntegrationCredentialOrNull(projectId, 'scm', role); - return token !== null; -} diff --git a/src/github/scm-integration.ts b/src/github/scm-integration.ts index 46c860bd..aedc6baf 100644 --- a/src/github/scm-integration.ts +++ b/src/github/scm-integration.ts @@ -4,13 +4,10 @@ * Encapsulates GitHub SCM credential resolution and validation * into a unified integration class following the IntegrationModule pattern. * - * Consolidates: - * - `hasScmIntegration()` logic from src/github/integration.ts - * - `hasScmPersonaToken()` logic from src/github/integration.ts - * - `withGitHubToken()` usage from src/github/client.ts - * - * Backward compatibility: the standalone functions in src/github/integration.ts - * remain exported and continue to work identically. 
+ * Provides: + * - `hasIntegration()` — checks if at least one token (implementer or reviewer) is configured + * - `hasPersonaToken()` — checks if a specific persona token is configured + * - `withCredentials()` — runs a function within the implementer token credential scope */ import { getIntegrationCredential, getIntegrationCredentialOrNull } from '../config/provider.js'; diff --git a/src/pm/index.ts b/src/pm/index.ts index 332e2894..c8ec7d0b 100644 --- a/src/pm/index.ts +++ b/src/pm/index.ts @@ -1,7 +1,6 @@ export { getPMProvider, getPMProviderOrNull, withPMProvider } from './context.js'; // PMIntegration interface + registry export type { PMIntegration, PMWebhookEvent } from './integration.js'; -export { hasPmIntegration } from './integration.js'; export { JiraPMProvider } from './jira/adapter.js'; export type { ProjectPMConfig } from './lifecycle.js'; export { hasAutoLabel, PMLifecycleManager, resolveProjectPMConfig } from './lifecycle.js'; diff --git a/src/pm/integration.ts b/src/pm/integration.ts index cfe3b157..8f915cbc 100644 --- a/src/pm/integration.ts +++ b/src/pm/integration.ts @@ -11,9 +11,6 @@ * Extends IntegrationModule so PM providers participate in the unified registry. */ -import { PROVIDER_CREDENTIAL_ROLES } from '../config/integrationRoles.js'; -import { getIntegrationCredentialOrNull } from '../config/provider.js'; -import { getIntegrationProvider } from '../db/repositories/credentialsRepository.js'; import type { IntegrationModule } from '../integrations/types.js'; import type { AgentExecutionConfig } from '../triggers/shared/agent-execution.js'; import type { CascadeConfig, ProjectConfig } from '../types/index.js'; @@ -92,28 +89,3 @@ export interface PMIntegration extends IntegrationModule { /** Extract a work item ID from text (e.g. PR body). Returns null if not found. 
*/ extractWorkItemId(text: string): string | null; } - -// ============================================================================ -// Integration check helpers -// ============================================================================ - -/** - * Check if PM integration is configured for a project. - * Returns true if a PM integration exists with all required credentials present. - * - * Uses the data-driven PROVIDER_CREDENTIAL_ROLES table so this function - * does not need to be updated when a new PM provider is added. - */ -export async function hasPmIntegration(projectId: string): Promise { - const provider = await getIntegrationProvider(projectId, 'pm'); - if (!provider) return false; - - const roles = PROVIDER_CREDENTIAL_ROLES[provider as keyof typeof PROVIDER_CREDENTIAL_ROLES]; - if (!roles || roles.length === 0) return false; - - const requiredRoles = roles.filter((r) => !r.optional); - const values = await Promise.all( - requiredRoles.map((roleDef) => getIntegrationCredentialOrNull(projectId, 'pm', roleDef.role)), - ); - return values.every((v) => v !== null); -} diff --git a/src/sentry/alerting-integration.ts b/src/sentry/alerting-integration.ts index 43cfc4d0..3f44e8ec 100644 --- a/src/sentry/alerting-integration.ts +++ b/src/sentry/alerting-integration.ts @@ -4,21 +4,13 @@ * Encapsulates Sentry alerting credential resolution and validation * into a unified integration class following the IntegrationModule pattern. * - * Consolidates: - * - `getSentryIntegrationConfig()` logic from src/sentry/integration.ts - * - `hasAlertingIntegration()` logic from src/sentry/integration.ts - * - * Backward compatibility: the standalone functions in src/sentry/integration.ts - * remain exported and continue to work identically. + * Inlines the hasIntegration logic directly (calls getSentryIntegrationConfig) + * rather than delegating to the now-deleted hasAlertingIntegration() standalone function. 
*/ import { getIntegrationCredential } from '../config/provider.js'; import type { AlertingIntegration } from '../integrations/alerting.js'; -import { - getSentryIntegrationConfig, - hasAlertingIntegration, - type SentryIntegrationConfig, -} from './integration.js'; +import { getSentryIntegrationConfig, type SentryIntegrationConfig } from './integration.js'; export class SentryAlertingIntegration implements AlertingIntegration { readonly type = 'sentry'; @@ -26,15 +18,15 @@ export class SentryAlertingIntegration implements AlertingIntegration { /** * Check if Sentry alerting integration is configured for a project. - * Delegates to existing hasAlertingIntegration() logic. + * Returns true if getSentryIntegrationConfig returns a valid config. */ async hasIntegration(projectId: string): Promise { - return hasAlertingIntegration(projectId); + const config = await getSentryIntegrationConfig(projectId); + return config !== null; } /** * Get the Sentry integration config for a project. - * Delegates to existing getSentryIntegrationConfig() logic. */ async getConfig(projectId: string): Promise { return getSentryIntegrationConfig(projectId); diff --git a/src/sentry/integration.ts b/src/sentry/integration.ts index e8894ca2..54acdb18 100644 --- a/src/sentry/integration.ts +++ b/src/sentry/integration.ts @@ -37,12 +37,3 @@ export async function getSentryIntegrationConfig( organizationSlug: config.organizationSlug, }; } - -/** - * Returns true if a Sentry alerting integration is configured for the project. - * Used by createIntegrationChecker() in the capability resolver. 
- */ -export async function hasAlertingIntegration(projectId: string): Promise { - const config = await getSentryIntegrationConfig(projectId); - return config !== null; -} diff --git a/tests/integration/integration-validation.test.ts b/tests/integration/integration-validation.test.ts index e2fadcc7..205180c3 100644 --- a/tests/integration/integration-validation.test.ts +++ b/tests/integration/integration-validation.test.ts @@ -12,13 +12,13 @@ */ import { beforeAll, beforeEach, describe, expect, it, vi } from 'vitest'; -import { hasScmIntegration, hasScmPersonaToken } from '../../src/github/integration.js'; // Bootstrap the integration registry so validateIntegrations() can find registered modules. // The new registry-driven implementation requires integrations to be registered before // calling getByCategory() — without this import the registry is empty and all validations // report "none is registered" instead of checking actual project credentials. import '../../src/integrations/bootstrap.js'; -import { hasPmIntegration } from '../../src/pm/integration.js'; +import { integrationRegistry } from '../../src/integrations/registry.js'; +import type { SCMIntegration } from '../../src/integrations/scm.js'; import { formatValidationErrors, getIntegrationRequirements, @@ -50,6 +50,32 @@ beforeAll(async () => { await truncateAll(); }); +// Helper functions using the integration registry +async function hasPmIntegration(projectId: string): Promise { + const integrations = integrationRegistry.getByCategory('pm'); + const statuses = await Promise.all(integrations.map((i) => i.hasIntegration(projectId))); + return statuses.some(Boolean); +} + +async function hasScmIntegration(projectId: string): Promise { + const integrations = integrationRegistry.getByCategory('scm'); + const statuses = await Promise.all(integrations.map((i) => i.hasIntegration(projectId))); + return statuses.some(Boolean); +} + +async function hasScmPersonaToken( + projectId: string, + persona: 'implementer' 
| 'reviewer', +): Promise { + const integrations = integrationRegistry.getByCategory('scm'); + const statuses = await Promise.all( + integrations + .filter((i): i is SCMIntegration => 'hasPersonaToken' in i) + .map((i) => i.hasPersonaToken(projectId, persona)), + ); + return statuses.some(Boolean); +} + describe('Integration Validation (integration)', () => { beforeEach(async () => { await truncateAll(); diff --git a/tests/unit/agents/capabilities/resolver.test.ts b/tests/unit/agents/capabilities/resolver.test.ts index d03d8932..6a189b42 100644 --- a/tests/unit/agents/capabilities/resolver.test.ts +++ b/tests/unit/agents/capabilities/resolver.test.ts @@ -6,6 +6,14 @@ function mockClass(name: string) { return vi.fn().mockImplementation(() => new cls()); } +// Mock integrationRegistry +const mockGetByCategory = vi.fn(); +vi.mock('../../../../src/integrations/registry.js', () => ({ + integrationRegistry: { + getByCategory: (...args: unknown[]) => mockGetByCategory(...args), + }, +})); + // Mock all gadget imports vi.mock('../../../../src/gadgets/AstGrep.js', () => ({ AstGrep: mockClass('AstGrep') })); vi.mock('../../../../src/gadgets/FileMultiEdit.js', () => ({ @@ -57,6 +65,7 @@ vi.mock('../../../../src/gadgets/todo/index.js', () => ({ import type { Capability } from '../../../../src/agents/capabilities/index.js'; import { + createIntegrationChecker, deriveIntegrations, deriveRequiredIntegrations, filterToolManifests, @@ -291,3 +300,104 @@ describe('filterToolManifests', () => { warnSpy.mockRestore(); }); }); + +describe('createIntegrationChecker', () => { + it('returns true for pm category when a pm integration is configured', async () => { + const mockPmIntegration = { hasIntegration: vi.fn().mockResolvedValue(true) }; + mockGetByCategory.mockImplementation((cat: string) => { + if (cat === 'pm') return [mockPmIntegration]; + return []; + }); + + const checker = await createIntegrationChecker('proj-1'); + + expect(checker('pm')).toBe(true); + 
expect(checker('scm')).toBe(false); + expect(checker('alerting')).toBe(false); + }); + + it('returns true for scm category when a scm integration is configured', async () => { + const mockScmIntegration = { hasIntegration: vi.fn().mockResolvedValue(true) }; + mockGetByCategory.mockImplementation((cat: string) => { + if (cat === 'scm') return [mockScmIntegration]; + return []; + }); + + const checker = await createIntegrationChecker('proj-1'); + + expect(checker('scm')).toBe(true); + expect(checker('pm')).toBe(false); + expect(checker('alerting')).toBe(false); + }); + + it('returns true for alerting category when an alerting integration is configured', async () => { + const mockAlertingIntegration = { hasIntegration: vi.fn().mockResolvedValue(true) }; + mockGetByCategory.mockImplementation((cat: string) => { + if (cat === 'alerting') return [mockAlertingIntegration]; + return []; + }); + + const checker = await createIntegrationChecker('proj-1'); + + expect(checker('alerting')).toBe(true); + expect(checker('pm')).toBe(false); + expect(checker('scm')).toBe(false); + }); + + it('returns false for all categories when no integrations are configured', async () => { + mockGetByCategory.mockReturnValue([]); + + const checker = await createIntegrationChecker('proj-1'); + + expect(checker('pm')).toBe(false); + expect(checker('scm')).toBe(false); + expect(checker('alerting')).toBe(false); + }); + + it('returns false when integration hasIntegration() returns false', async () => { + const mockIntegration = { hasIntegration: vi.fn().mockResolvedValue(false) }; + mockGetByCategory.mockImplementation((cat: string) => { + if (cat === 'pm') return [mockIntegration]; + return []; + }); + + const checker = await createIntegrationChecker('proj-1'); + + expect(checker('pm')).toBe(false); + }); + + it('returns true if ANY integration in a category is configured (OR logic)', async () => { + const mockInt1 = { hasIntegration: vi.fn().mockResolvedValue(false) }; + const mockInt2 = { 
hasIntegration: vi.fn().mockResolvedValue(true) }; + mockGetByCategory.mockImplementation((cat: string) => { + if (cat === 'pm') return [mockInt1, mockInt2]; + return []; + }); + + const checker = await createIntegrationChecker('proj-1'); + + expect(checker('pm')).toBe(true); + }); + + it('calls hasIntegration with the correct projectId', async () => { + const mockIntegration = { hasIntegration: vi.fn().mockResolvedValue(true) }; + mockGetByCategory.mockImplementation((cat: string) => { + if (cat === 'pm') return [mockIntegration]; + return []; + }); + + await createIntegrationChecker('my-project-id'); + + expect(mockIntegration.hasIntegration).toHaveBeenCalledWith('my-project-id'); + }); + + it('calls getByCategory for pm, scm, and alerting', async () => { + mockGetByCategory.mockReturnValue([]); + + await createIntegrationChecker('proj-1'); + + expect(mockGetByCategory).toHaveBeenCalledWith('pm'); + expect(mockGetByCategory).toHaveBeenCalledWith('scm'); + expect(mockGetByCategory).toHaveBeenCalledWith('alerting'); + }); +}); diff --git a/tests/unit/github/integration.test.ts b/tests/unit/github/integration.test.ts deleted file mode 100644 index 23760389..00000000 --- a/tests/unit/github/integration.test.ts +++ /dev/null @@ -1,158 +0,0 @@ -import { describe, expect, it, vi } from 'vitest'; - -// --------------------------------------------------------------------------- -// Mocks -// --------------------------------------------------------------------------- - -const mockGetIntegrationProvider = vi.fn(); -vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ - getIntegrationProvider: (...args: unknown[]) => mockGetIntegrationProvider(...args), -})); - -const mockGetIntegrationCredentialOrNull = vi.fn(); -vi.mock('../../../src/config/provider.js', () => ({ - getIntegrationCredentialOrNull: (...args: unknown[]) => - mockGetIntegrationCredentialOrNull(...args), -})); - -import { hasScmIntegration, hasScmPersonaToken } from 
'../../../src/github/integration.js'; - -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - -describe('hasScmIntegration', () => { - it('returns false when no SCM integration provider configured', async () => { - mockGetIntegrationProvider.mockResolvedValue(null); - - const result = await hasScmIntegration('proj-1'); - - expect(result).toBe(false); - expect(mockGetIntegrationCredentialOrNull).not.toHaveBeenCalled(); - }); - - it('returns true when implementer_token is present (reviewer absent)', async () => { - mockGetIntegrationProvider.mockResolvedValue('github'); - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('ghp_implementer_token') // implementer_token - .mockResolvedValueOnce(null); // reviewer_token - - const result = await hasScmIntegration('proj-1'); - - expect(result).toBe(true); - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith( - 'proj-1', - 'scm', - 'implementer_token', - ); - }); - - it('returns true when reviewer_token is present (implementer absent)', async () => { - mockGetIntegrationProvider.mockResolvedValue('github'); - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce(null) // implementer_token - .mockResolvedValueOnce('ghp_reviewer_token'); // reviewer_token - - const result = await hasScmIntegration('proj-1'); - - expect(result).toBe(true); - }); - - it('returns true when both tokens are present', async () => { - mockGetIntegrationProvider.mockResolvedValue('github'); - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('ghp_impl') - .mockResolvedValueOnce('ghp_rev'); - - const result = await hasScmIntegration('proj-1'); - - expect(result).toBe(true); - }); - - it('returns false when provider exists but both tokens are missing', async () => { - mockGetIntegrationProvider.mockResolvedValue('github'); - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce(null) // 
implementer_token - .mockResolvedValueOnce(null); // reviewer_token - - const result = await hasScmIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('passes correct projectId and category to getIntegrationProvider', async () => { - mockGetIntegrationProvider.mockResolvedValue(null); - - await hasScmIntegration('my-project'); - - expect(mockGetIntegrationProvider).toHaveBeenCalledWith('my-project', 'scm'); - }); -}); - -describe('hasScmPersonaToken', () => { - it('returns true when implementer token is present', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue('ghp_implementer'); - - const result = await hasScmPersonaToken('proj-1', 'implementer'); - - expect(result).toBe(true); - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith( - 'proj-1', - 'scm', - 'implementer_token', - ); - }); - - it('returns false when implementer token is absent', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue(null); - - const result = await hasScmPersonaToken('proj-1', 'implementer'); - - expect(result).toBe(false); - }); - - it('returns true when reviewer token is present', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue('ghp_reviewer'); - - const result = await hasScmPersonaToken('proj-1', 'reviewer'); - - expect(result).toBe(true); - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith( - 'proj-1', - 'scm', - 'reviewer_token', - ); - }); - - it('returns false when reviewer token is absent', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue(null); - - const result = await hasScmPersonaToken('proj-1', 'reviewer'); - - expect(result).toBe(false); - }); - - it('maps implementer persona to implementer_token role', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue('some-token'); - - await hasScmPersonaToken('proj-2', 'implementer'); - - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith( - 'proj-2', - 'scm', - 'implementer_token', - ); - 
}); - - it('maps reviewer persona to reviewer_token role', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue('some-token'); - - await hasScmPersonaToken('proj-2', 'reviewer'); - - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith( - 'proj-2', - 'scm', - 'reviewer_token', - ); - }); -}); diff --git a/tests/unit/pm/integration.test.ts b/tests/unit/pm/integration.test.ts deleted file mode 100644 index f8d186e2..00000000 --- a/tests/unit/pm/integration.test.ts +++ /dev/null @@ -1,160 +0,0 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; - -// --------------------------------------------------------------------------- -// Mocks -// --------------------------------------------------------------------------- - -const mockGetIntegrationProvider = vi.fn(); -vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ - getIntegrationProvider: (...args: unknown[]) => mockGetIntegrationProvider(...args), -})); - -const mockGetIntegrationCredentialOrNull = vi.fn(); -vi.mock('../../../src/config/provider.js', () => ({ - getIntegrationCredentialOrNull: (...args: unknown[]) => - mockGetIntegrationCredentialOrNull(...args), -})); - -import { hasPmIntegration } from '../../../src/pm/integration.js'; - -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - -describe('hasPmIntegration', () => { - it('returns false when no PM integration provider configured', async () => { - mockGetIntegrationProvider.mockResolvedValue(null); - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(false); - expect(mockGetIntegrationCredentialOrNull).not.toHaveBeenCalled(); - }); - - it('returns false when provider is unknown (not in PROVIDER_CREDENTIAL_ROLES)', async () => { - mockGetIntegrationProvider.mockResolvedValue('unknown-provider'); - - const result = await hasPmIntegration('proj-1'); - - 
expect(result).toBe(false); - }); - - it('passes projectId and "pm" category to getIntegrationProvider', async () => { - mockGetIntegrationProvider.mockResolvedValue(null); - - await hasPmIntegration('my-project'); - - expect(mockGetIntegrationProvider).toHaveBeenCalledWith('my-project', 'pm'); - }); - - // ========================================================================= - // Trello - // ========================================================================= - describe('trello provider', () => { - beforeEach(() => { - mockGetIntegrationProvider.mockResolvedValue('trello'); - }); - - it('returns true when all required trello credentials are present', async () => { - // Trello required roles: api_key, token (api_secret is optional) - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('my-api-key') // api_key - .mockResolvedValueOnce('my-token'); // token - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(true); - }); - - it('returns false when trello api_key is missing', async () => { - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce(null) // api_key missing - .mockResolvedValueOnce('my-token'); // token present - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('returns false when trello token is missing', async () => { - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('my-api-key') // api_key present - .mockResolvedValueOnce(null); // token missing - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('returns false when both required trello credentials are missing', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue(null); - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('checks required roles (api_key, token) — not optional api_secret', async () => { - // Required: api_key, token. 
Optional: api_secret - // If api_key and token present → true, regardless of api_secret - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('my-api-key') - .mockResolvedValueOnce('my-token'); - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(true); - // Should only have checked 2 required credentials (not 3) - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledTimes(2); - }); - }); - - // ========================================================================= - // JIRA - // ========================================================================= - describe('jira provider', () => { - beforeEach(() => { - mockGetIntegrationProvider.mockResolvedValue('jira'); - }); - - it('returns true when all required jira credentials are present', async () => { - // JIRA required roles: email, api_token - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('bot@example.com') // email - .mockResolvedValueOnce('api-token-xxx'); // api_token - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(true); - }); - - it('returns false when jira email is missing', async () => { - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce(null) // email missing - .mockResolvedValueOnce('api-token-xxx'); - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('returns false when jira api_token is missing', async () => { - mockGetIntegrationCredentialOrNull - .mockResolvedValueOnce('bot@example.com') - .mockResolvedValueOnce(null); // api_token missing - - const result = await hasPmIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('checks for pm category credentials for jira', async () => { - mockGetIntegrationCredentialOrNull.mockResolvedValue('value'); - - await hasPmIntegration('proj-1'); - - expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith('proj-1', 'pm', 'email'); - 
expect(mockGetIntegrationCredentialOrNull).toHaveBeenCalledWith('proj-1', 'pm', 'api_token'); - }); - }); -}); diff --git a/tests/unit/sentry/alerting-integration.test.ts b/tests/unit/sentry/alerting-integration.test.ts index 7cfd7d31..fbc88b73 100644 --- a/tests/unit/sentry/alerting-integration.test.ts +++ b/tests/unit/sentry/alerting-integration.test.ts @@ -11,11 +11,9 @@ vi.mock('../../../src/config/provider.js', () => ({ })); const mockGetSentryIntegrationConfig = vi.fn(); -const mockHasAlertingIntegration = vi.fn(); vi.mock('../../../src/sentry/integration.js', () => ({ getSentryIntegrationConfig: (...args: unknown[]) => mockGetSentryIntegrationConfig(...args), - hasAlertingIntegration: (...args: unknown[]) => mockHasAlertingIntegration(...args), })); import { SentryAlertingIntegration } from '../../../src/sentry/alerting-integration.js'; @@ -49,30 +47,30 @@ describe('SentryAlertingIntegration', () => { // hasIntegration // ========================================================================= describe('hasIntegration', () => { - it('returns true when sentry integration is configured', async () => { - mockHasAlertingIntegration.mockResolvedValue(true); + it('returns true when sentry integration config is non-null', async () => { + mockGetSentryIntegrationConfig.mockResolvedValue({ organizationSlug: 'my-org' }); const result = await integration.hasIntegration('proj-1'); expect(result).toBe(true); - expect(mockHasAlertingIntegration).toHaveBeenCalledWith('proj-1'); + expect(mockGetSentryIntegrationConfig).toHaveBeenCalledWith('proj-1'); }); - it('returns false when sentry integration is not configured', async () => { - mockHasAlertingIntegration.mockResolvedValue(false); + it('returns false when sentry integration config is null', async () => { + mockGetSentryIntegrationConfig.mockResolvedValue(null); const result = await integration.hasIntegration('proj-1'); expect(result).toBe(false); - expect(mockHasAlertingIntegration).toHaveBeenCalledWith('proj-1'); + 
expect(mockGetSentryIntegrationConfig).toHaveBeenCalledWith('proj-1'); }); - it('delegates to hasAlertingIntegration() with the correct projectId', async () => { - mockHasAlertingIntegration.mockResolvedValue(true); + it('calls getSentryIntegrationConfig with the correct projectId', async () => { + mockGetSentryIntegrationConfig.mockResolvedValue(null); await integration.hasIntegration('my-project-id'); - expect(mockHasAlertingIntegration).toHaveBeenCalledWith('my-project-id'); + expect(mockGetSentryIntegrationConfig).toHaveBeenCalledWith('my-project-id'); }); }); diff --git a/tests/unit/sentry/integration.test.ts b/tests/unit/sentry/integration.test.ts index cec38e40..08264b05 100644 --- a/tests/unit/sentry/integration.test.ts +++ b/tests/unit/sentry/integration.test.ts @@ -5,10 +5,7 @@ vi.mock('../../../src/db/repositories/integrationsRepository.js', () => ({ })); import { getIntegrationByProjectAndCategory } from '../../../src/db/repositories/integrationsRepository.js'; -import { - getSentryIntegrationConfig, - hasAlertingIntegration, -} from '../../../src/sentry/integration.js'; +import { getSentryIntegrationConfig } from '../../../src/sentry/integration.js'; const mockGetIntegrationByProjectAndCategory = vi.mocked(getIntegrationByProjectAndCategory); @@ -110,62 +107,4 @@ describe('sentry/integration', () => { ); }); }); - - describe('hasAlertingIntegration', () => { - it('returns true when sentry integration is configured', async () => { - mockGetIntegrationByProjectAndCategory.mockResolvedValueOnce({ - id: 'int-1', - provider: 'sentry', - config: { organizationSlug: 'my-org' }, - }); - - const result = await hasAlertingIntegration('proj-1'); - - expect(result).toBe(true); - }); - - it('returns false when no integration exists', async () => { - mockGetIntegrationByProjectAndCategory.mockResolvedValueOnce(null); - - const result = await hasAlertingIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('returns false when integration has wrong 
provider', async () => { - mockGetIntegrationByProjectAndCategory.mockResolvedValueOnce({ - id: 'int-1', - provider: 'pagerduty', - config: { organizationSlug: 'my-org' }, - }); - - const result = await hasAlertingIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('returns false when integration config is missing organizationSlug', async () => { - mockGetIntegrationByProjectAndCategory.mockResolvedValueOnce({ - id: 'int-1', - provider: 'sentry', - config: {}, - }); - - const result = await hasAlertingIntegration('proj-1'); - - expect(result).toBe(false); - }); - - it('delegates to getSentryIntegrationConfig (not null => true)', async () => { - mockGetIntegrationByProjectAndCategory.mockResolvedValueOnce({ - id: 'int-1', - provider: 'sentry', - config: { organizationSlug: 'org-slug' }, - }); - - const result = await hasAlertingIntegration('proj-with-sentry'); - - expect(result).toBe(true); - }); - }); }); From 12dd91713a98569476e62e8a2e6c590b746f9c39 Mon Sep 17 00:00:00 2001 From: aaight Date: Fri, 3 Apr 2026 18:19:05 +0200 Subject: [PATCH 10/52] fix(pm): consolidate PM registration to single canonical path in integrations/bootstrap.ts (#1078) * fix(pm): consolidate PM registration to single canonical path in integrations/bootstrap.ts * fix(test): add bootstrap import to pm-provider-switching integration test After removing the side-effect registration from src/pm/index.ts, the pm-provider-switching integration test no longer had pmRegistry populated, causing the pmRegistry assertions and createPMProvider calls to fail. Add the canonical bootstrap import (matching integration-validation.test.ts). 
Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- CLAUDE.md | 3 +- src/integrations/registry.ts | 2 +- src/pm/bootstrap.ts | 43 ------------------- src/pm/index.ts | 12 ------ src/pm/registry.ts | 6 +-- .../integration/pm-provider-switching.test.ts | 3 ++ tests/unit/cli/credential-scoping.test.ts | 39 +++++++++++++++++ tests/unit/pm/factory.test.ts | 30 +++++++++++++ tests/unit/pm/lifecycle.test.ts | 31 ++++++++++++- tests/unit/router/webhook-signature.test.ts | 2 - tests/unit/triggers/jira-label-added.test.ts | 16 +++++-- tests/unit/triggers/pr-merged.test.ts | 13 ++++-- tests/unit/triggers/pr-ready-to-merge.test.ts | 13 ++++-- 13 files changed, 139 insertions(+), 74 deletions(-) delete mode 100644 src/pm/bootstrap.ts diff --git a/CLAUDE.md b/CLAUDE.md index 0b3eb1db..d288d819 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -167,8 +167,7 @@ integrationRegistry.getOrNull('sentry') // null if missing integrationRegistry.getByCategory('pm') // all PM integrations ``` -PM integrations are also registered in `pmRegistry` (`src/pm/registry.ts`) for backward -compatibility with existing PM-specific callers. +PM integrations are registered in `pmRegistry` via `src/integrations/bootstrap.ts` (the single canonical registration point). 
### Credential roles diff --git a/src/integrations/registry.ts b/src/integrations/registry.ts index d278689a..ef994b48 100644 --- a/src/integrations/registry.ts +++ b/src/integrations/registry.ts @@ -74,5 +74,5 @@ export class IntegrationRegistry { } } -/** Singleton registry, populated at import time by each integration module */ +/** Singleton registry, populated at bootstrap time by src/integrations/bootstrap.ts */ export const integrationRegistry = new IntegrationRegistry(); diff --git a/src/pm/bootstrap.ts b/src/pm/bootstrap.ts deleted file mode 100644 index df435a47..00000000 --- a/src/pm/bootstrap.ts +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Integration bootstrap — safe to import from the router. - * - * Registers all built-in integrations (PM and SCM) into their respective - * registries without pulling in the full agent execution pipeline (no - * processPMWebhook, no template files, no agent execution dependencies). - * - * Import this module from the router entry point to ensure integrations - * are available before any platform adapters are called. Each integration - * class is standalone (HTTP-based, no agent pipeline dependencies). - * - * Adding a new PM integration requires: - * 1. Implementing PMIntegration in `pm//integration.ts` - * 2. Registering it here. - * - * Adding a new SCM integration requires: - * 1. Implementing SCMIntegration in `github/scm-integration.ts` (or similar) - * 2. Registering it here. 
- */ - -import { GitHubSCMIntegration } from '../github/scm-integration.js'; -import { integrationRegistry } from '../integrations/registry.js'; -import { SentryAlertingIntegration } from '../sentry/alerting-integration.js'; -import { JiraIntegration } from './jira/integration.js'; -import { pmRegistry } from './registry.js'; -import { TrelloIntegration } from './trello/integration.js'; - -if (!pmRegistry.getOrNull('trello')) { - const trello = new TrelloIntegration(); - pmRegistry.register(trello); - if (!integrationRegistry.getOrNull('trello')) integrationRegistry.register(trello); -} -if (!pmRegistry.getOrNull('jira')) { - const jira = new JiraIntegration(); - pmRegistry.register(jira); - if (!integrationRegistry.getOrNull('jira')) integrationRegistry.register(jira); -} -if (!integrationRegistry.getOrNull('github')) { - integrationRegistry.register(new GitHubSCMIntegration()); -} -if (!integrationRegistry.getOrNull('sentry')) { - integrationRegistry.register(new SentryAlertingIntegration()); -} diff --git a/src/pm/index.ts b/src/pm/index.ts index c8ec7d0b..b8364a76 100644 --- a/src/pm/index.ts +++ b/src/pm/index.ts @@ -27,22 +27,10 @@ export type { } from './types.js'; export { processPMWebhook } from './webhook-handler.js'; -import { integrationRegistry } from '../integrations/registry.js'; import type { ProjectConfig } from '../types/index.js'; -import { JiraIntegration } from './jira/integration.js'; import { pmRegistry } from './registry.js'; -// Register built-in integrations at import time -import { TrelloIntegration } from './trello/integration.js'; import type { PMProvider } from './types.js'; -const trelloIntegration = new TrelloIntegration(); -pmRegistry.register(trelloIntegration); -if (!integrationRegistry.getOrNull('trello')) integrationRegistry.register(trelloIntegration); - -const jiraIntegration = new JiraIntegration(); -pmRegistry.register(jiraIntegration); -if (!integrationRegistry.getOrNull('jira')) 
integrationRegistry.register(jiraIntegration); - export function createPMProvider(project: ProjectConfig): PMProvider { return pmRegistry.createProvider(project); } diff --git a/src/pm/registry.ts b/src/pm/registry.ts index 9737498a..8822565e 100644 --- a/src/pm/registry.ts +++ b/src/pm/registry.ts @@ -1,8 +1,8 @@ /** * PMIntegrationRegistry — singleton that holds all registered PM integrations. * - * Populated at import time by each integration module. The router, worker, - * and shared infrastructure use `pmRegistry.get(type)` to obtain the + * Populated at bootstrap time by `src/integrations/bootstrap.ts`. The router, + * worker, and shared infrastructure use `pmRegistry.get(type)` to obtain the * integration instance without provider-specific branching. */ @@ -49,5 +49,5 @@ class PMIntegrationRegistry { } } -/** Singleton registry, populated at import time */ +/** Singleton registry, populated at bootstrap time by `src/integrations/bootstrap.ts` */ export const pmRegistry = new PMIntegrationRegistry(); diff --git a/tests/integration/pm-provider-switching.test.ts b/tests/integration/pm-provider-switching.test.ts index 3d8f8fcf..769e1e8b 100644 --- a/tests/integration/pm-provider-switching.test.ts +++ b/tests/integration/pm-provider-switching.test.ts @@ -6,6 +6,9 @@ */ import { beforeAll, beforeEach, describe, expect, it } from 'vitest'; +// Bootstrap the integration registry so pmRegistry and createPMProvider work correctly. +// After removing side-effect registration from src/pm/index.ts, this is required. 
+import '../../src/integrations/bootstrap.js'; import { findProjectByBoardIdFromDb, findProjectByJiraProjectKeyFromDb, diff --git a/tests/unit/cli/credential-scoping.test.ts b/tests/unit/cli/credential-scoping.test.ts index 11e8f115..f1626e6f 100644 --- a/tests/unit/cli/credential-scoping.test.ts +++ b/tests/unit/cli/credential-scoping.test.ts @@ -10,6 +10,45 @@ vi.mock('../../../src/trello/client.js', () => ({ ), })); +// Mocks required for PM integration registration (integrations/bootstrap.js side-effect) +vi.mock('../../../src/config/provider.js', () => ({ + getIntegrationCredential: vi.fn().mockResolvedValue('mock-cred'), + getIntegrationCredentialOrNull: vi.fn().mockResolvedValue(null), + loadProjectConfigByBoardId: vi.fn().mockResolvedValue(null), + loadProjectConfigByJiraProjectKey: vi.fn().mockResolvedValue(null), + findProjectById: vi.fn().mockResolvedValue(null), +})); + +vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: vi.fn().mockResolvedValue(null), +})); + +vi.mock('../../../src/jira/client.js', () => ({ + withJiraCredentials: vi.fn((_creds: unknown, fn: () => unknown) => fn()), + jiraClient: {}, +})); + +vi.mock('../../../src/sentry/integration.js', () => ({ + getSentryIntegrationConfig: vi.fn().mockResolvedValue(null), + hasAlertingIntegration: vi.fn().mockResolvedValue(false), +})); + +vi.mock('../../../src/router/acknowledgments.js', () => ({ + postTrelloAck: vi.fn().mockResolvedValue(null), + deleteTrelloAck: vi.fn().mockResolvedValue(undefined), + resolveTrelloBotMemberId: vi.fn().mockResolvedValue(null), + postJiraAck: vi.fn().mockResolvedValue(null), + deleteJiraAck: vi.fn().mockResolvedValue(undefined), + resolveJiraBotAccountId: vi.fn().mockResolvedValue(null), +})); + +vi.mock('../../../src/router/reactions.js', () => ({ + sendAcknowledgeReaction: vi.fn(), +})); + +// Register PM integrations in the registry via the canonical bootstrap path +import '../../../src/integrations/bootstrap.js'; 
+ import { CredentialScopedCommand } from '../../../src/cli/base.js'; import { withGitHubToken } from '../../../src/github/client.js'; import { withTrelloCredentials } from '../../../src/trello/client.js'; diff --git a/tests/unit/pm/factory.test.ts b/tests/unit/pm/factory.test.ts index f2d4654c..520231aa 100644 --- a/tests/unit/pm/factory.test.ts +++ b/tests/unit/pm/factory.test.ts @@ -22,11 +22,16 @@ vi.mock('../../../src/pm/jira/adapter.js', () => ({ // Mock provider.ts to avoid DB calls from integration constructors vi.mock('../../../src/config/provider.js', () => ({ getIntegrationCredential: vi.fn().mockResolvedValue('mock-cred'), + getIntegrationCredentialOrNull: vi.fn().mockResolvedValue(null), loadProjectConfigByBoardId: vi.fn().mockResolvedValue(null), loadProjectConfigByJiraProjectKey: vi.fn().mockResolvedValue(null), findProjectById: vi.fn().mockResolvedValue(null), })); +vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: vi.fn().mockResolvedValue(null), +})); + vi.mock('../../../src/trello/client.js', () => ({ withTrelloCredentials: vi.fn((_creds, fn) => fn()), trelloClient: {}, @@ -37,6 +42,31 @@ vi.mock('../../../src/jira/client.js', () => ({ jiraClient: {}, })); +vi.mock('../../../src/github/client.js', () => ({ + withGitHubToken: vi.fn((_token, fn) => fn()), +})); + +vi.mock('../../../src/sentry/integration.js', () => ({ + getSentryIntegrationConfig: vi.fn().mockResolvedValue(null), + hasAlertingIntegration: vi.fn().mockResolvedValue(false), +})); + +vi.mock('../../../src/router/acknowledgments.js', () => ({ + postTrelloAck: vi.fn().mockResolvedValue(null), + deleteTrelloAck: vi.fn().mockResolvedValue(undefined), + resolveTrelloBotMemberId: vi.fn().mockResolvedValue(null), + postJiraAck: vi.fn().mockResolvedValue(null), + deleteJiraAck: vi.fn().mockResolvedValue(undefined), + resolveJiraBotAccountId: vi.fn().mockResolvedValue(null), +})); + +vi.mock('../../../src/router/reactions.js', () => ({ + 
sendAcknowledgeReaction: vi.fn(), +})); + +// Import bootstrap after mocks — registers integrations into pmRegistry via the canonical path +import '../../../src/integrations/bootstrap.js'; + // Import after mocks so the integrations register with mocked adapters // factory.ts was removed; createPMProvider is now an inline function in index.ts import { createPMProvider } from '../../../src/pm/index.js'; diff --git a/tests/unit/pm/lifecycle.test.ts b/tests/unit/pm/lifecycle.test.ts index a5cd4f16..e4037bc2 100644 --- a/tests/unit/pm/lifecycle.test.ts +++ b/tests/unit/pm/lifecycle.test.ts @@ -3,11 +3,16 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; // Mock dependencies before imports vi.mock('../../../src/config/provider.js', () => ({ getIntegrationCredential: vi.fn().mockResolvedValue('mock-cred'), + getIntegrationCredentialOrNull: vi.fn().mockResolvedValue(null), loadProjectConfigByBoardId: vi.fn().mockResolvedValue(null), loadProjectConfigByJiraProjectKey: vi.fn().mockResolvedValue(null), findProjectById: vi.fn().mockResolvedValue(null), })); +vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: vi.fn().mockResolvedValue(null), +})); + vi.mock('../../../src/trello/client.js', () => ({ withTrelloCredentials: vi.fn((_creds, fn) => fn()), trelloClient: {}, @@ -18,13 +23,35 @@ vi.mock('../../../src/jira/client.js', () => ({ jiraClient: {}, })); +vi.mock('../../../src/github/client.js', () => ({ + withGitHubToken: vi.fn((_token, fn) => fn()), +})); + +vi.mock('../../../src/sentry/integration.js', () => ({ + getSentryIntegrationConfig: vi.fn().mockResolvedValue(null), + hasAlertingIntegration: vi.fn().mockResolvedValue(false), +})); + +vi.mock('../../../src/router/acknowledgments.js', () => ({ + postTrelloAck: vi.fn().mockResolvedValue(null), + deleteTrelloAck: vi.fn().mockResolvedValue(undefined), + resolveTrelloBotMemberId: vi.fn().mockResolvedValue(null), + postJiraAck: vi.fn().mockResolvedValue(null), 
+ deleteJiraAck: vi.fn().mockResolvedValue(undefined), + resolveJiraBotAccountId: vi.fn().mockResolvedValue(null), +})); + +vi.mock('../../../src/router/reactions.js', () => ({ + sendAcknowledgeReaction: vi.fn(), +})); + vi.mock('../../../src/utils/safeOperation.js', () => ({ safeOperation: vi.fn((fn) => fn()), silentOperation: vi.fn((fn) => fn()), })); -// Import after mocks — side-effect import registers integrations with pmRegistry -import '../../../src/pm/index.js'; +// Import after mocks — bootstrap registers integrations with pmRegistry via the canonical path +import '../../../src/integrations/bootstrap.js'; import { PMLifecycleManager, type ProjectPMConfig, diff --git a/tests/unit/router/webhook-signature.test.ts b/tests/unit/router/webhook-signature.test.ts index d1388277..e66b0852 100644 --- a/tests/unit/router/webhook-signature.test.ts +++ b/tests/unit/router/webhook-signature.test.ts @@ -74,8 +74,6 @@ vi.mock('../../../src/agents/prompts/index.js', () => ({ initPrompts: vi.fn().mockResolvedValue(undefined), })); -vi.mock('../../../src/pm/bootstrap.js', () => ({})); - vi.mock('../../../src/triggers/builtins.js', () => ({ registerBuiltInTriggers: vi.fn(), })); diff --git a/tests/unit/triggers/jira-label-added.test.ts b/tests/unit/triggers/jira-label-added.test.ts index 289c7126..d738cb8d 100644 --- a/tests/unit/triggers/jira-label-added.test.ts +++ b/tests/unit/triggers/jira-label-added.test.ts @@ -12,15 +12,25 @@ import { vi.mock('../../../src/triggers/config-resolver.js', () => mockConfigResolverModule); vi.mock('../../../src/triggers/shared/trigger-check.js', () => mockTriggerCheckModule); -// Mocks required for PM integration registration (pm/index.js side-effect) +// Mocks required for PM integration registration (integrations/bootstrap.js side-effect) vi.mock('../../../src/config/provider.js', () => mockConfigProvider); +vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: 
vi.fn().mockResolvedValue(null), +})); vi.mock('../../../src/trello/client.js', () => mockTrelloClientModule); vi.mock('../../../src/jira/client.js', () => mockJiraClientModule); +vi.mock('../../../src/github/client.js', () => ({ + withGitHubToken: vi.fn((_token: unknown, fn: () => unknown) => fn()), +})); +vi.mock('../../../src/sentry/integration.js', () => ({ + getSentryIntegrationConfig: vi.fn().mockResolvedValue(null), + hasAlertingIntegration: vi.fn().mockResolvedValue(false), +})); vi.mock('../../../src/router/acknowledgments.js', () => mockAcknowledgmentsModule); vi.mock('../../../src/router/reactions.js', () => mockReactionsModule); -// Register PM integrations in the registry -import '../../../src/pm/index.js'; +// Register PM integrations in the registry via the canonical bootstrap path +import '../../../src/integrations/bootstrap.js'; import { JiraReadyToProcessLabelTrigger } from '../../../src/triggers/jira/label-added.js'; import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; diff --git a/tests/unit/triggers/pr-merged.test.ts b/tests/unit/triggers/pr-merged.test.ts index 7b7a62b4..6687b046 100644 --- a/tests/unit/triggers/pr-merged.test.ts +++ b/tests/unit/triggers/pr-merged.test.ts @@ -35,10 +35,17 @@ vi.mock('../../../src/pm/context.js', () => ({ getPMProvider: () => mockProvider, })); -// Mocks required for PM integration registration (pm/index.js side-effect) +// Mocks required for PM integration registration (integrations/bootstrap.js side-effect) vi.mock('../../../src/config/provider.js', () => mockConfigProvider); +vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: vi.fn().mockResolvedValue(null), +})); vi.mock('../../../src/trello/client.js', () => mockTrelloClientModule); vi.mock('../../../src/jira/client.js', () => mockJiraClientModule); +vi.mock('../../../src/sentry/integration.js', () => ({ + getSentryIntegrationConfig: vi.fn().mockResolvedValue(null), + 
hasAlertingIntegration: vi.fn().mockResolvedValue(false), +})); vi.mock('../../../src/router/acknowledgments.js', () => mockAcknowledgmentsModule); vi.mock('../../../src/router/reactions.js', () => mockReactionsModule); vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ @@ -51,8 +58,8 @@ vi.mock('../../../src/router/snapshot-manager.js', () => ({ invalidateSnapshot: (...args: unknown[]) => mockInvalidateSnapshot(...args), })); -// Register PM integrations in the registry -import '../../../src/pm/index.js'; +// Register PM integrations in the registry via the canonical bootstrap path +import '../../../src/integrations/bootstrap.js'; import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { githubClient } from '../../../src/github/client.js'; diff --git a/tests/unit/triggers/pr-ready-to-merge.test.ts b/tests/unit/triggers/pr-ready-to-merge.test.ts index dc930042..200f579e 100644 --- a/tests/unit/triggers/pr-ready-to-merge.test.ts +++ b/tests/unit/triggers/pr-ready-to-merge.test.ts @@ -27,18 +27,25 @@ vi.mock('../../../src/pm/context.js', () => ({ getPMProvider: () => mockProvider, })); -// Mocks required for PM integration registration (pm/index.js side-effect) +// Mocks required for PM integration registration (integrations/bootstrap.js side-effect) vi.mock('../../../src/config/provider.js', () => mockConfigProvider); +vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: vi.fn().mockResolvedValue(null), +})); vi.mock('../../../src/trello/client.js', () => mockTrelloClientModule); vi.mock('../../../src/jira/client.js', () => mockJiraClientModule); +vi.mock('../../../src/sentry/integration.js', () => ({ + getSentryIntegrationConfig: vi.fn().mockResolvedValue(null), + hasAlertingIntegration: vi.fn().mockResolvedValue(false), +})); vi.mock('../../../src/router/acknowledgments.js', () => mockAcknowledgmentsModule); vi.mock('../../../src/router/reactions.js', 
() => mockReactionsModule); vi.mock('../../../src/db/repositories/prWorkItemsRepository.js', () => ({ lookupWorkItemForPR: vi.fn(), })); -// Register PM integrations in the registry -import '../../../src/pm/index.js'; +// Register PM integrations in the registry via the canonical bootstrap path +import '../../../src/integrations/bootstrap.js'; import { lookupWorkItemForPR } from '../../../src/db/repositories/prWorkItemsRepository.js'; import { githubClient } from '../../../src/github/client.js'; From 4345775ebbb4532e3639b786b4303df386f81d41 Mon Sep 17 00:00:00 2001 From: aaight Date: Fri, 3 Apr 2026 18:28:53 +0200 Subject: [PATCH 11/52] chore(deps): upgrade @anthropic-ai/claude-agent-sdk from ^0.2.42 to ^0.2.91 (#1079) Co-authored-by: Cascade Bot --- package-lock.json | 16 ++++++++-------- package.json | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/package-lock.json b/package-lock.json index bae9da68..75b129a6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "license": "MIT", "dependencies": { - "@anthropic-ai/claude-agent-sdk": "^0.2.42", + "@anthropic-ai/claude-agent-sdk": "^0.2.91", "@hono/node-server": "^1.13.7", "@hono/trpc-server": "^0.4.2", "@llmist/cli": "^16.0.3", @@ -95,12 +95,12 @@ } }, "node_modules/@anthropic-ai/claude-agent-sdk": { - "version": "0.2.90", - "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-agent-sdk/-/claude-agent-sdk-0.2.90.tgz", - "integrity": "sha512-up5bK0pUbthKIZtNE18WDrIYi0KNpZUhdgjGbkfH/mFQJxI6W/uE3mTiLrCX3UF0SqNl0fMtojBTZPJr2b3O4g==", + "version": "0.2.91", + "resolved": "https://registry.npmjs.org/@anthropic-ai/claude-agent-sdk/-/claude-agent-sdk-0.2.91.tgz", + "integrity": "sha512-DCd5Ad5XKBbIIOMZ73L+c+e9azM6NtZzOtdKQAzykzRG/KxSCMraMAsMMQrJrIUMH3oTtHY7QuQimAiElVVVpA==", "license": "SEE LICENSE IN README.md", "dependencies": { - "@anthropic-ai/sdk": "^0.74.0", + "@anthropic-ai/sdk": "^0.80.0", "@modelcontextprotocol/sdk": "^1.27.1" }, "engines": { @@ 
-122,9 +122,9 @@ } }, "node_modules/@anthropic-ai/claude-agent-sdk/node_modules/@anthropic-ai/sdk": { - "version": "0.74.0", - "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.74.0.tgz", - "integrity": "sha512-srbJV7JKsc5cQ6eVuFzjZO7UR3xEPJqPamHFIe29bs38Ij2IripoAhC0S5NslNbaFUYqBKypmmpzMTpqfHEUDw==", + "version": "0.80.0", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.80.0.tgz", + "integrity": "sha512-WeXLn7zNVk3yjeshn+xZHvld6AoFUOR3Sep6pSoHho5YbSi6HwcirqgPA5ccFuW8QTVJAAU7N8uQQC6Wa9TG+g==", "license": "MIT", "dependencies": { "json-schema-to-ts": "^3.1.1" diff --git a/package.json b/package.json index 5aad896c..abb447e9 100644 --- a/package.json +++ b/package.json @@ -54,7 +54,7 @@ "author": "Zbigniew Sobiecki", "license": "MIT", "dependencies": { - "@anthropic-ai/claude-agent-sdk": "^0.2.42", + "@anthropic-ai/claude-agent-sdk": "^0.2.91", "@hono/node-server": "^1.13.7", "@hono/trpc-server": "^0.4.2", "@llmist/cli": "^16.0.3", From beb568f5bfc055e581be80c6a49d19bd9ee6f78e Mon Sep 17 00:00:00 2001 From: aaight Date: Fri, 3 Apr 2026 18:37:50 +0200 Subject: [PATCH 12/52] refactor(cli): rename config_() getter to cliConfig in DashboardCommand (#1080) Co-authored-by: Cascade Bot --- src/cli/dashboard/_shared/base.ts | 14 +++++++------- src/cli/dashboard/webhooks/create.ts | 2 +- src/cli/dashboard/webhooks/delete.ts | 2 +- src/cli/dashboard/webhooks/list.ts | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/cli/dashboard/_shared/base.ts b/src/cli/dashboard/_shared/base.ts index 76b7bd5d..23ce0000 100644 --- a/src/cli/dashboard/_shared/base.ts +++ b/src/cli/dashboard/_shared/base.ts @@ -52,22 +52,22 @@ export abstract class DashboardCommand extends Command { }; private _client: DashboardClient | undefined; - private _config: CliConfig | undefined; + private _cliConfig: CliConfig | undefined; - protected get config_(): CliConfig { - if (!this._config) { + protected get cliConfig(): CliConfig { + if 
(!this._cliConfig) { const config = loadConfig(); if (!config) { this.error('Not logged in. Run `cascade login` first.'); } - this._config = config; + this._cliConfig = config; } - return this._config; + return this._cliConfig; } protected get client(): DashboardClient { if (!this._client) { - const config = this.config_; + const config = this.cliConfig; // Allow --server and --org flags to override const flags = this.parseBaseFlags(); if (flags?.server) { @@ -192,7 +192,7 @@ export abstract class DashboardCommand extends Command { process.stderr.write(`${err.stack}\n`); } - const serverUrl = this._config?.serverUrl; + const serverUrl = this._cliConfig?.serverUrl; const actionable = mapError(err, serverUrl); const message = formatActionableError(actionable); diff --git a/src/cli/dashboard/webhooks/create.ts b/src/cli/dashboard/webhooks/create.ts index 8e0a0d21..fca76db1 100644 --- a/src/cli/dashboard/webhooks/create.ts +++ b/src/cli/dashboard/webhooks/create.ts @@ -29,7 +29,7 @@ export default class WebhooksCreate extends DashboardCommand { const { args, flags } = await this.parse(WebhooksCreate); try { - const callbackBaseUrl = flags['callback-url'] || this.config_.serverUrl; + const callbackBaseUrl = flags['callback-url'] || this.cliConfig.serverUrl; const oneTimeTokens: Record = {}; if (flags['github-token']) oneTimeTokens.github = flags['github-token']; diff --git a/src/cli/dashboard/webhooks/delete.ts b/src/cli/dashboard/webhooks/delete.ts index 04a10820..dfc64909 100644 --- a/src/cli/dashboard/webhooks/delete.ts +++ b/src/cli/dashboard/webhooks/delete.ts @@ -28,7 +28,7 @@ export default class WebhooksDelete extends DashboardCommand { const { args, flags } = await this.parse(WebhooksDelete); try { - const callbackBaseUrl = flags['callback-url'] || this.config_.serverUrl; + const callbackBaseUrl = flags['callback-url'] || this.cliConfig.serverUrl; const oneTimeTokens: Record = {}; if (flags['github-token']) oneTimeTokens.github = flags['github-token']; diff 
--git a/src/cli/dashboard/webhooks/list.ts b/src/cli/dashboard/webhooks/list.ts index 42eaa0ec..495f90cc 100644 --- a/src/cli/dashboard/webhooks/list.ts +++ b/src/cli/dashboard/webhooks/list.ts @@ -33,7 +33,7 @@ export default class WebhooksList extends DashboardCommand { const result = await this.client.webhooks.list.query({ projectId: args.projectId, - callbackBaseUrl: this.config_.serverUrl || undefined, + callbackBaseUrl: this.cliConfig.serverUrl || undefined, oneTimeTokens: Object.keys(oneTimeTokens).length > 0 ? oneTimeTokens : undefined, }); From 81a636ecea3346f46142986c6fdd2ac2771c2802 Mon Sep 17 00:00:00 2001 From: aaight Date: Fri, 3 Apr 2026 19:35:01 +0200 Subject: [PATCH 13/52] refactor(triggers): extract shared webhook utilities and add Sentry concurrency (#1081) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor(triggers): extract shared webhook utilities and add Sentry concurrency * fix(triggers): move startWatchdog inside concurrency callback and fix README - Move startWatchdog() inside the execute closure (GitHub) and inside the concurrency callback (Sentry) so it only fires when an agent actually runs. Previously the watchdog timer started before withAgentTypeConcurrency, so a concurrency-blocked job would tick a timer that could fire process.exit(1) after the container finished its work. - Fix Worker-Side Handler Comparison table: GitHub now uses withAgentTypeConcurrency (not checkAgentTypeConcurrency directly). - Fix Shared Utilities table: remove PM from the "Used By" column for concurrency.ts since src/pm/webhook-handler.ts still uses raw checkAgentTypeConcurrency. - Update flow diagrams to show startWatchdog nested inside withAgentTypeConcurrency. 
Co-Authored-By: Claude Opus 4.6 * docs(triggers): fix inaccurate "Used By" entries and module comment - trigger-resolution.ts: correct "Used By" to "Sentry (GitHub and PM use inline logic)" — GitHub handler uses inline if/else, not resolveTriggerResult - pm-ack.ts: correct "Used By" to "GitHub worker handler" — the router adapter has its own local postPMAck and does not import the shared utility - pm-ack.ts module comment: remove false claim about router-side usage; add note clarifying the router adapter has its own local function - GitHub webhook flow diagram: replace resolveTriggerResult with accurate inline dispatch description Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- src/triggers/README.md | 131 ++++++++++++++++ src/triggers/github/webhook-handler.ts | 106 +++++-------- src/triggers/sentry/webhook-handler.ts | 52 +++---- src/triggers/shared/concurrency.ts | 64 ++++++++ src/triggers/shared/credential-scope.ts | 37 +++++ src/triggers/shared/pm-ack.ts | 50 ++++++ src/triggers/shared/trigger-resolution.ts | 48 ++++++ .../triggers/github-webhook-handler.test.ts | 46 ++---- .../triggers/sentry-webhook-handler.test.ts | 101 ++++++++---- .../unit/triggers/shared/concurrency.test.ts | 145 ++++++++++++++++++ .../triggers/shared/credential-scope.test.ts | 122 +++++++++++++++ .../shared/trigger-resolution.test.ts | 130 ++++++++++++++++ 12 files changed, 883 insertions(+), 149 deletions(-) create mode 100644 src/triggers/README.md create mode 100644 src/triggers/shared/concurrency.ts create mode 100644 src/triggers/shared/credential-scope.ts create mode 100644 src/triggers/shared/pm-ack.ts create mode 100644 src/triggers/shared/trigger-resolution.ts create mode 100644 tests/unit/triggers/shared/concurrency.test.ts create mode 100644 tests/unit/triggers/shared/credential-scope.test.ts create mode 100644 tests/unit/triggers/shared/trigger-resolution.test.ts diff --git a/src/triggers/README.md 
b/src/triggers/README.md new file mode 100644 index 00000000..78a6813b --- /dev/null +++ b/src/triggers/README.md @@ -0,0 +1,131 @@ +# Trigger System + +This directory contains the trigger handlers and registry that route webhook events to agents. + +## Architecture Overview + +``` +Webhook → Router → Redis/BullMQ → Worker → TriggerRegistry → Agent +``` + +### Two-tier webhook handling + +Webhook processing is split into two distinct tiers: + +| Tier | Where | Purpose | +|------|-------|---------| +| **Router** | `src/router/` | Receive, validate, acknowledge, enqueue | +| **Worker** | `src/triggers/` | Resolve trigger, establish credentials, run agent | + +**Router side is fully unified** — all four providers (Trello, JIRA, GitHub, Sentry) share `processRouterWebhook()` + `RouterPlatformAdapter`. No provider-specific branching in the router. + +**Worker side has intentional divergence** — see below. + +--- + +## Worker-Side Handler Comparison + +| Feature | PM (`processPMWebhook`) | GitHub (`processGitHubWebhook`) | Sentry (`processSentryWebhook`) | +|---------|------------------------|--------------------------------|--------------------------------| +| Trigger dispatch | ✅ Registry | ✅ Registry or pre-resolved | ✅ Registry or pre-resolved | +| Ack comment (PR) | ❌ N/A | ✅ Posts to PR | ❌ N/A | +| Ack comment (PM) | ✅ Via PM lifecycle | ✅ For PM-focused agents | ❌ N/A | +| CI check polling | ❌ N/A | ✅ `pollWaitForChecks()` | ❌ N/A | +| PM credential scope | ✅ `integration.withCredentials` | ✅ `withPMCredentials` | ✅ `withPMCredentials` | +| PM lifecycle ops | ✅ prepareForAgent / handleFailure | ✅ For PM-focused agents | ❌ Skipped | +| Persona token mgmt | ❌ N/A | ✅ Implementer / reviewer | ❌ N/A | +| Agent concurrency | ✅ `checkAgentTypeConcurrency` | ✅ `withAgentTypeConcurrency` | ✅ `withAgentTypeConcurrency` | + +--- + +## Why GitHub and Sentry Cannot Use `processPMWebhook()` + +`processPMWebhook()` assumes **PM semantics**: +- It calls 
`integration.parseWebhookPayload()` expecting a PM event (card ID, board identifier) +- It drives `PMLifecycleManager` (prepareForAgent → handleFailure / handleSuccess) +- The `PMIntegration` interface provides card parsing, ack cleanup, and credential scoping + +Forcing GitHub or Sentry into this pipeline would require: +- Provider-specific `if` branches inside `processPMWebhook()` — worse than current design +- Mocking PM lifecycle ops (they don't apply to Sentry alerts or GitHub PRs) + +### GitHub-specific features (cannot be generalized) + +1. **CI check polling** (`pollWaitForChecks`) — GitHub is the only provider with CI. No other source polls build status before running an agent. +2. **PR acknowledgment comments** — GitHub PRs get a comment like "👀 Reviewing…" immediately. No other source has this flow. +3. **Dual-persona token management** — The implementer vs. reviewer persona selection is GitHub-specific. No Trello/JIRA/Sentry equivalent. +4. **PM-focused agent routing** — When a PM-focused agent (e.g. `backlog-manager`) fires from a GitHub PR event, it posts the ack to Trello/JIRA instead of the PR, and uses PM-appropriate lifecycle config. + +### Sentry-specific simplicity (intentional) + +Sentry is an alerting source. There are no: +- Work item cards to manage lifecycle on +- PR comments to post +- CI checks to poll + +Sentry's handler is intentionally minimal: load project, resolve trigger, run agent in PM scope. 
+ +--- + +## Shared Utilities (`src/triggers/shared/`) + +To reduce duplication across the three worker-side handlers, shared utilities are extracted to `src/triggers/shared/`: + +| File | Purpose | Used By | +|------|---------|---------| +| `concurrency.ts` | `withAgentTypeConcurrency()` — wraps check→mark→execute→clear | GitHub, Sentry | +| `trigger-resolution.ts` | `resolveTriggerResult()` — pre-resolved or dispatch | Sentry (GitHub and PM use inline logic) | +| `credential-scope.ts` | `withPMScope()` — `withPMCredentials` + `withPMProvider` | GitHub, Sentry | +| `pm-ack.ts` | `postPMAckComment()` — posts ack to Trello/JIRA | GitHub worker handler | +| `agent-execution.ts` | `runAgentExecutionPipeline()` — full agent lifecycle | All handlers (via `webhook-execution.ts`) | +| `webhook-execution.ts` | `runAgentWithCredentials()` — LLM keys + credentials + pipeline | GitHub, PM | + +--- + +## Flow Diagrams + +### PM webhook (Trello / JIRA) + +``` +processPMWebhook(integration, payload, registry) + └─ integration.parseWebhookPayload(payload) → event + └─ integration.lookupProject(event.identifier) → project + └─ integration.withCredentials(projectId) + └─ withPMProvider(pmProvider) + └─ [inline] resolve trigger (pre-resolved or registry.dispatch) + └─ handleMatchedTrigger(...) + └─ checkAgentTypeConcurrency(projectId, agentType) [raw mark/clear inline] + └─ startWatchdog() + └─ executeAgent() → runAgentWithCredentials() + └─ injectLlmApiKeys() + └─ withGitHubToken(personaToken) + └─ runAgentExecutionPipeline(...) +``` + +### GitHub webhook + +``` +processGitHubWebhook(payload, eventType, registry, ackCommentId, triggerResult) + └─ integration.parseWebhookPayload(payload) → event + └─ integration.lookupProject(event.repo) → project + └─ [inline] if triggerResult → use it, else dispatchTrigger(registry, payload, project) + └─ [optional] pollWaitForChecks(result, repo) → checksOk + └─ maybePostAckComment(result, ...)
→ PR or PM ack + └─ runGitHubAgent(result, project, config) + └─ withAgentTypeConcurrency(projectId, agentType) + └─ startWatchdog() + └─ withPMScope(project) + └─ runAgentWithCredentials(integration, result, ...) +``` + +### Sentry webhook + +``` +processSentryWebhook(payload, projectId, registry, triggerResult) + └─ loadProjectConfigById(projectId) → project + └─ resolveTriggerResult(registry, ctx, preResolved) + └─ withAgentTypeConcurrency(projectId, agentType) + └─ startWatchdog() + └─ withPMScope(project) + └─ runAgentExecutionPipeline(result, ...) +``` diff --git a/src/triggers/github/webhook-handler.ts b/src/triggers/github/webhook-handler.ts index 41c79317..762ab7db 100644 --- a/src/triggers/github/webhook-handler.ts +++ b/src/triggers/github/webhook-handler.ts @@ -6,26 +6,23 @@ * - CI check polling → ./check-polling.ts * - Credential scoping + agent execution → ../shared/webhook-execution.ts * - GitHub-specific AgentExecutionConfig → ./integration.ts + * - Agent-type concurrency → ../shared/concurrency.ts + * - PM credential scope → ../shared/credential-scope.ts + * - PM ack posting → ../shared/pm-ack.ts */ import { isPMFocusedAgent } from '../../agents/definitions/loader.js'; import { githubClient, withGitHubToken } from '../../github/client.js'; import { getPersonaToken, resolvePersonaIdentities } from '../../github/personas.js'; -import { withPMCredentials, withPMProvider } from '../../pm/context.js'; -import { createPMProvider, pmRegistry } from '../../pm/index.js'; import { extractGitHubContext, generateAckMessage } from '../../router/ackMessageGenerator.js'; -import { postJiraAck, postTrelloAck } from '../../router/acknowledgments.js'; -import { - checkAgentTypeConcurrency, - clearAgentTypeEnqueued, - markAgentTypeEnqueued, - markRecentlyDispatched, -} from '../../router/agent-type-lock.js'; import type { CascadeConfig, ProjectConfig, TriggerContext } from '../../types/index.js'; import { logger, startWatchdog } from '../../utils/index.js'; import { 
parseRepoFullName } from '../../utils/repo.js'; import { safeOperation } from '../../utils/safeOperation.js'; import type { TriggerRegistry } from '../registry.js'; +import { withAgentTypeConcurrency } from '../shared/concurrency.js'; +import { withPMScope } from '../shared/credential-scope.js'; +import { postPMAckComment } from '../shared/pm-ack.js'; import { runAgentWithCredentials } from '../shared/webhook-execution.js'; import type { TriggerResult } from '../types.js'; import { postAcknowledgmentComment, updateInitialCommentWithError } from './ack-comments.js'; @@ -53,10 +50,6 @@ function requireProjectId(project: ProjectConfig): string { return project.id; } -function isValidPmType(pmType: string | undefined): pmType is 'trello' | 'jira' { - return pmType === 'trello' || pmType === 'jira'; -} - async function maybePostPmAckComment( result: TriggerResult, payload: unknown, @@ -73,18 +66,13 @@ async function maybePostPmAckComment( ); const pmType = project.pm?.type; - if (!isValidPmType(pmType)) { - logger.warn('Unknown PM type for PM-focused agent ack (worker-side)', { - agentType: result.agentType, - pmType, - }); - return; - } - - const commentId = - pmType === 'trello' - ? await postTrelloAck(projectId, workItemId, message) - : await postJiraAck(projectId, workItemId, message); + const commentId = await postPMAckComment( + projectId, + workItemId, + pmType, + message, + result.agentType ?? 
undefined, + ); if (commentId) { result.agentInput.ackCommentId = commentId; @@ -102,14 +90,7 @@ async function dispatchTrigger( const personaIdentities = await resolvePersonaIdentities(projectId); const githubToken = await getPersonaToken(projectId, 'implementation'); const ctx: TriggerContext = { project, source: 'github', payload, personaIdentities }; - const pmProvider = createPMProvider(project); - return withPMCredentials( - projectId, - project.pm?.type, - (t) => pmRegistry.getOrNull(t), - () => - withPMProvider(pmProvider, () => withGitHubToken(githubToken, () => registry.dispatch(ctx))), - ); + return withPMScope(project, () => withGitHubToken(githubToken, () => registry.dispatch(ctx))); } /** Post ack comment on the PR using the agent-specific persona token. */ @@ -167,43 +148,38 @@ async function runGitHubAgent( project: ProjectConfig, config: CascadeConfig, ): Promise { - // Agent-type concurrency limit - let agentTypeMaxConcurrency: number | null = null; - if (result.agentType) { - const concurrencyCheck = await checkAgentTypeConcurrency(project.id, result.agentType); - agentTypeMaxConcurrency = concurrencyCheck.maxConcurrency; - if (concurrencyCheck.blocked) return; - if (agentTypeMaxConcurrency !== null) { - markRecentlyDispatched(project.id, result.agentType); - markAgentTypeEnqueued(project.id, result.agentType); - } - } - - startWatchdog(project.watchdogTimeoutMs); - // PM-focused agents (e.g. backlog-manager) triggered from GitHub should use // PM-appropriate lifecycle config: no GitHub PR comment callbacks, allow PM lifecycle ops. const pmFocused = result.agentType ? await isPMFocusedAgent(result.agentType) : false; - try { + const agentType = result.agentType; + + const execute = async () => { + // Only start the watchdog when the agent actually runs (after concurrency check passes). + // Starting it before the check risks a spurious process.exit(1) if the container + // is still alive after a concurrency-blocked job finishes. 
+ startWatchdog(project.watchdogTimeoutMs); + // Establish PM credential + provider scope for agents with workItemId // (needed for PM lifecycle operations: labels, status moves, PR links) - const pmProvider = createPMProvider(project); - await withPMCredentials( - project.id, - project.pm?.type, - (t) => pmRegistry.getOrNull(t), - () => - withPMProvider(pmProvider, () => - runAgentWithCredentials( - integration, - result, - project, - config, - resolveGitHubExecutionConfig(pmFocused), - ), - ), + await withPMScope(project, () => + runAgentWithCredentials( + integration, + result, + project, + config, + resolveGitHubExecutionConfig(pmFocused), + ), ); + }; + + // Agent-type concurrency limit wraps the entire execution + try { + if (agentType) { + await withAgentTypeConcurrency(project.id, agentType, execute, 'GitHub agent'); + } else { + await execute(); + } } catch (err) { logger.error('Failed to process GitHub webhook', { error: String(err) }); if (!pmFocused) { @@ -216,10 +192,6 @@ async function runGitHubAgent( updateInitialCommentWithError(result, { success: false, error: String(err) }), ); } - } finally { - if (result.agentType && agentTypeMaxConcurrency !== null) { - clearAgentTypeEnqueued(project.id, result.agentType); - } } } diff --git a/src/triggers/sentry/webhook-handler.ts b/src/triggers/sentry/webhook-handler.ts index 9bb3ec98..67b29bf8 100644 --- a/src/triggers/sentry/webhook-handler.ts +++ b/src/triggers/sentry/webhook-handler.ts @@ -5,15 +5,21 @@ * falling back to dispatching through the trigger registry if not. * After resolving the trigger result, runs the matched agent via the * shared execution pipeline. 
+ * + * Shared utilities used: + * - Trigger resolution → ../shared/trigger-resolution.ts + * - Agent-type concurrency → ../shared/concurrency.ts + * - PM credential scope → ../shared/credential-scope.ts */ -import { withPMCredentials, withPMProvider } from '../../pm/context.js'; -import { createPMProvider, pmRegistry } from '../../pm/index.js'; import type { TriggerResult } from '../../types/index.js'; import { startWatchdog } from '../../utils/lifecycle.js'; import { logger } from '../../utils/logging.js'; import type { TriggerRegistry } from '../registry.js'; import { runAgentExecutionPipeline } from '../shared/agent-execution.js'; +import { withAgentTypeConcurrency } from '../shared/concurrency.js'; +import { withPMScope } from '../shared/credential-scope.js'; +import { resolveTriggerResult } from '../shared/trigger-resolution.js'; export async function processSentryWebhook( payload: unknown, @@ -29,22 +35,14 @@ export async function processSentryWebhook( return; } + const ctx = { + project: pc.project, + source: 'sentry' as const, + payload, + }; + // Resolve trigger result — use pre-computed from router or dispatch via registry - let result: TriggerResult | null; - if (triggerResult) { - logger.info('processSentryWebhook: using pre-computed trigger result', { - projectId, - agentType: triggerResult.agentType, - }); - result = triggerResult; - } else { - const ctx = { - project: pc.project, - source: 'sentry' as const, - payload, - }; - result = await registry.dispatch(ctx); - } + const result = await resolveTriggerResult(registry, ctx, triggerResult, 'processSentryWebhook'); if (!result) { logger.info('processSentryWebhook: no trigger matched', { projectId }); @@ -63,20 +61,22 @@ export async function processSentryWebhook( agentType: result.agentType, }); - startWatchdog(pc.project.watchdogTimeoutMs); - - const pmProvider = createPMProvider(pc.project); - await withPMCredentials( + await withAgentTypeConcurrency( pc.project.id, - pc.project.pm?.type, - (t) => 
pmRegistry.getOrNull(t), - () => - withPMProvider(pmProvider, () => + result.agentType, + () => { + // Only start the watchdog when the agent actually runs (after concurrency check passes). + // Starting it before the check risks a spurious process.exit(1) if the container + // is still alive after a concurrency-blocked job finishes. + startWatchdog(pc.project.watchdogTimeoutMs); + return withPMScope(pc.project, () => runAgentExecutionPipeline(result, pc.project, pc.config, { logLabel: 'Sentry agent', skipPrepareForAgent: true, skipHandleFailure: true, }), - ), + ); + }, + 'processSentryWebhook', ); } diff --git a/src/triggers/shared/concurrency.ts b/src/triggers/shared/concurrency.ts new file mode 100644 index 00000000..b2414f69 --- /dev/null +++ b/src/triggers/shared/concurrency.ts @@ -0,0 +1,64 @@ +/** + * Shared concurrency management utility for webhook handlers. + * + * Wraps the duplicated check→mark→execute→clear pattern used by both + * `handleMatchedTrigger()` (pm/webhook-handler) and `runGitHubAgent()` + * (github/webhook-handler) into a single reusable function. + * + * Usage: + * await withAgentTypeConcurrency(projectId, agentType, () => runTheAgent()); + */ + +import { + checkAgentTypeConcurrency, + clearAgentTypeEnqueued, + markAgentTypeEnqueued, + markRecentlyDispatched, +} from '../../router/agent-type-lock.js'; +import { logger } from '../../utils/logging.js'; + +/** + * Execute `fn` within agent-type concurrency limits. + * + * 1. Checks whether the agent-type is at its concurrency limit. + * 2. If not blocked, marks the slot as enqueued and runs `fn`. + * 3. Clears the enqueued slot in a `finally` block. + * + * Returns `false` if the concurrency check was blocked (fn was not called), + * `true` if fn was called (regardless of whether it succeeded). + * + * @param projectId The project ID to scope concurrency to. + * @param agentType The agent type being dispatched. + * @param fn The async function to run if not blocked. 
+ * @param logLabel Optional label for log messages (default: 'Agent'). + */ +export async function withAgentTypeConcurrency( + projectId: string, + agentType: string, + fn: () => Promise<void>, + logLabel?: string, +): Promise<boolean> { + const concurrencyCheck = await checkAgentTypeConcurrency(projectId, agentType, logLabel); + if (concurrencyCheck.blocked) { + logger.info(`${logLabel ?? 'Agent'} type concurrency blocked, skipping`, { + projectId, + agentType, + }); + return false; + } + + const hasLimit = concurrencyCheck.maxConcurrency !== null; + if (hasLimit) { + markRecentlyDispatched(projectId, agentType); + markAgentTypeEnqueued(projectId, agentType); + } + + try { + await fn(); + return true; + } finally { + if (hasLimit) { + clearAgentTypeEnqueued(projectId, agentType); + } + } +} diff --git a/src/triggers/shared/credential-scope.ts b/src/triggers/shared/credential-scope.ts new file mode 100644 index 00000000..db78bb81 --- /dev/null +++ b/src/triggers/shared/credential-scope.ts @@ -0,0 +1,37 @@ +/** + * Shared PM credential scoping utility for webhook handlers. + * + * Wraps the `withPMCredentials(…, () => withPMProvider(…, fn))` pattern + * used by GitHub (webhook-handler.ts L190-206) and Sentry (webhook-handler.ts L68-81) + * into a single reusable function. + * + * Usage: + * await withPMScope(project, () => runTheAgent()); + */ + +import { withPMCredentials, withPMProvider } from '../../pm/context.js'; +import { createPMProvider, pmRegistry } from '../../pm/index.js'; +import type { ProjectConfig } from '../../types/index.js'; + +/** + * Execute `fn` within the PM credential and PM provider scope for a project. + * + * Sets up: + * withPMCredentials → withPMProvider → fn() + * + * This is the standard PM scope needed for agents that perform PM lifecycle + * operations (labels, status moves, PR links). Falls through gracefully if + * no PM type is configured on the project. + * + * @param project Project config (used to determine PM type and credentials).
+ * @param fn Async function to run inside the PM scope. + */ +export async function withPMScope(project: ProjectConfig, fn: () => Promise): Promise { + const pmProvider = createPMProvider(project); + return withPMCredentials( + project.id, + project.pm?.type, + (t) => pmRegistry.getOrNull(t), + () => withPMProvider(pmProvider, fn), + ); +} diff --git a/src/triggers/shared/pm-ack.ts b/src/triggers/shared/pm-ack.ts new file mode 100644 index 00000000..72b8c3ad --- /dev/null +++ b/src/triggers/shared/pm-ack.ts @@ -0,0 +1,50 @@ +/** + * Shared PM acknowledgment posting utility for webhook handlers. + * + * Centralises the logic for posting acknowledgment comments to PM tools + * (Trello/JIRA) for PM-focused agents triggered from GitHub or other + * non-PM sources. + * + * Used by: + * - Worker-side: `triggers/github/webhook-handler.ts` (maybePostPmAckComment) + * + * Note: `router/adapters/github.ts` has its own local `postPMAck` function + * and does not use this shared utility. + */ + +import { postJiraAck, postTrelloAck } from '../../router/acknowledgments.js'; +import { logger } from '../../utils/logging.js'; + +/** + * Post a PM acknowledgment comment to Trello or JIRA. + * + * Returns the comment ID if successfully posted, or null if the PM type + * is not supported or posting failed. + * + * @param projectId The project ID for credential resolution. + * @param workItemId The work item ID to post the comment on (card ID / issue key). + * @param pmType The PM provider type ('trello' or 'jira'). + * @param message The acknowledgment message to post. + * @param agentType Used only for warning log context when pmType is unknown. 
+ */ +export async function postPMAckComment( + projectId: string, + workItemId: string, + pmType: string | undefined, + message: string, + agentType?: string, +): Promise { + if (pmType === 'trello') { + return postTrelloAck(projectId, workItemId, message); + } + + if (pmType === 'jira') { + return postJiraAck(projectId, workItemId, message); + } + + logger.warn('Unknown PM type for PM-focused agent ack, skipping', { + agentType, + pmType, + }); + return null; +} diff --git a/src/triggers/shared/trigger-resolution.ts b/src/triggers/shared/trigger-resolution.ts new file mode 100644 index 00000000..07ec264e --- /dev/null +++ b/src/triggers/shared/trigger-resolution.ts @@ -0,0 +1,48 @@ +/** + * Shared trigger resolution utility for webhook handlers. + * + * Extracts the "if pre-resolved result use it, else dispatch" pattern + * duplicated across GitHub (webhook-handler.ts L253-261), + * Sentry (webhook-handler.ts L33-47), and PM (webhook-handler.ts L64-87). + * + * Usage: + * const result = await resolveTriggerResult(registry, ctx, preResolvedResult); + */ + +import type { TriggerContext, TriggerResult } from '../../types/index.js'; +import { logger } from '../../utils/logging.js'; +import type { TriggerRegistry } from '../registry.js'; + +/** + * Resolve a trigger result from either a pre-computed result or registry dispatch. + * + * If `preResolvedResult` is provided, it is returned immediately (skipping dispatch). + * Otherwise, `registry.dispatch(ctx)` is called to compute the result. + * + * @param registry Trigger registry to dispatch against (when no pre-resolved result). + * @param ctx Trigger context passed to registry dispatch. + * @param preResolvedResult Optional pre-computed result from the router (skips dispatch). + * @param logLabel Optional label for log messages (default: uses ctx.source). + * @returns The resolved TriggerResult, or null if no trigger matched. 
+ */ +export async function resolveTriggerResult( + registry: TriggerRegistry, + ctx: TriggerContext, + preResolvedResult?: TriggerResult, + logLabel?: string, +): Promise { + const label = logLabel ?? ctx.source; + + if (preResolvedResult) { + logger.info(`${label}: using pre-resolved trigger result`, { + agentType: preResolvedResult.agentType, + }); + return preResolvedResult; + } + + const result = await registry.dispatch(ctx); + if (!result) { + logger.info(`${label}: no trigger matched`); + } + return result; +} diff --git a/tests/unit/triggers/github-webhook-handler.test.ts b/tests/unit/triggers/github-webhook-handler.test.ts index 3cfecf4b..32651de4 100644 --- a/tests/unit/triggers/github-webhook-handler.test.ts +++ b/tests/unit/triggers/github-webhook-handler.test.ts @@ -59,23 +59,21 @@ vi.mock('../../../src/router/ackMessageGenerator.js', () => ({ generateAckMessage: vi.fn().mockResolvedValue('Starting...'), })); -vi.mock('../../../src/router/acknowledgments.js', () => ({ - postTrelloAck: vi.fn().mockResolvedValue('comment-id'), - postJiraAck: vi.fn().mockResolvedValue('comment-id'), -})); - vi.mock('../../../src/utils/safeOperation.js', () => ({ safeOperation: vi.fn().mockImplementation((fn) => fn()), })); -vi.mock('../../../src/pm/context.js', () => ({ - withPMCredentials: vi.fn().mockImplementation((_id, _type, _get, fn) => fn()), - withPMProvider: vi.fn().mockImplementation((_provider, fn) => fn()), +// Mock shared utilities used by processGitHubWebhook +vi.mock('../../../src/triggers/shared/concurrency.js', () => ({ + withAgentTypeConcurrency: vi.fn().mockImplementation((_id, _type, fn) => fn()), +})); + +vi.mock('../../../src/triggers/shared/credential-scope.js', () => ({ + withPMScope: vi.fn().mockImplementation((_project, fn) => fn()), })); -vi.mock('../../../src/pm/index.js', () => ({ - createPMProvider: vi.fn().mockReturnValue({}), - pmRegistry: { getOrNull: vi.fn().mockReturnValue(null) }, +vi.mock('../../../src/triggers/shared/pm-ack.js', () => 
({ + postPMAckComment: vi.fn().mockResolvedValue('pm-comment-id'), })); vi.mock('../../../src/triggers/shared/webhook-execution.js', () => ({ @@ -91,13 +89,6 @@ vi.mock('../../../src/triggers/github/check-polling.js', () => ({ pollWaitForChecks: vi.fn().mockResolvedValue(true), })); -vi.mock('../../../src/router/agent-type-lock.js', () => ({ - checkAgentTypeConcurrency: vi.fn().mockResolvedValue({ maxConcurrency: null, blocked: false }), - markAgentTypeEnqueued: vi.fn(), - clearAgentTypeEnqueued: vi.fn(), - markRecentlyDispatched: vi.fn(), -})); - vi.mock('../../../src/utils/index.js', () => ({ logger: { debug: vi.fn(), @@ -110,14 +101,14 @@ vi.mock('../../../src/utils/index.js', () => ({ import { isPMFocusedAgent } from '../../../src/agents/definitions/loader.js'; import { githubClient } from '../../../src/github/client.js'; -import { postJiraAck, postTrelloAck } from '../../../src/router/acknowledgments.js'; -import { checkAgentTypeConcurrency } from '../../../src/router/agent-type-lock.js'; import { postAcknowledgmentComment, updateInitialCommentWithError, } from '../../../src/triggers/github/ack-comments.js'; import { pollWaitForChecks } from '../../../src/triggers/github/check-polling.js'; import { processGitHubWebhook } from '../../../src/triggers/github/webhook-handler.js'; +import { withAgentTypeConcurrency } from '../../../src/triggers/shared/concurrency.js'; +import { postPMAckComment } from '../../../src/triggers/shared/pm-ack.js'; import { runAgentWithCredentials } from '../../../src/triggers/shared/webhook-execution.js'; import { startWatchdog } from '../../../src/utils/index.js'; @@ -144,6 +135,7 @@ const validPayload = { beforeEach(() => { mockRunAgentWithCredentials.mockResolvedValue(undefined); + vi.mocked(withAgentTypeConcurrency).mockImplementation((_id, _type, fn) => fn()); }); describe('processGitHubWebhook', () => { @@ -235,10 +227,7 @@ describe('processGitHubWebhook', () => { }); it('skips agent execution when agent-type concurrency is 
blocked', async () => { - vi.mocked(checkAgentTypeConcurrency).mockResolvedValueOnce({ - maxConcurrency: 1, - blocked: true, - }); + vi.mocked(withAgentTypeConcurrency).mockResolvedValueOnce(false); const registry = createMockRegistry(); await processGitHubWebhook(validPayload, 'pull_request', registry as never); expect(mockRunAgentWithCredentials).not.toHaveBeenCalled(); @@ -322,9 +311,9 @@ describe('processGitHubWebhook', () => { expect(mockRunAgentWithCredentials).not.toHaveBeenCalled(); }); - it('posts PM ack to Trello when PM-focused agent triggered from GitHub (trello PM)', async () => { + it('posts PM ack to PM tool when PM-focused agent triggered from GitHub', async () => { vi.mocked(isPMFocusedAgent).mockResolvedValue(true); - vi.mocked(postTrelloAck).mockResolvedValue('trello-ack-id'); + vi.mocked(postPMAckComment).mockResolvedValue('pm-ack-id'); // Override lookupProject to return a project with trello PM const { GitHubWebhookIntegration } = await import( @@ -354,7 +343,7 @@ describe('processGitHubWebhook', () => { await processGitHubWebhook(validPayload, 'pull_request', registry as never); - // PM ack should be posted to Trello (or attempt was made); GitHub PR comment not used + // PM ack should be posted to PM tool; GitHub PR comment not used expect(postAcknowledgmentComment).not.toHaveBeenCalled(); }); @@ -371,8 +360,7 @@ describe('processGitHubWebhook', () => { await processGitHubWebhook(validPayload, 'pull_request', registry as never); - expect(postTrelloAck).not.toHaveBeenCalled(); - expect(postJiraAck).not.toHaveBeenCalled(); + expect(postPMAckComment).not.toHaveBeenCalled(); expect(postAcknowledgmentComment).not.toHaveBeenCalled(); }); diff --git a/tests/unit/triggers/sentry-webhook-handler.test.ts b/tests/unit/triggers/sentry-webhook-handler.test.ts index 1fb9d121..e7c91c22 100644 --- a/tests/unit/triggers/sentry-webhook-handler.test.ts +++ b/tests/unit/triggers/sentry-webhook-handler.test.ts @@ -7,16 +7,6 @@ 
vi.mock('../../../src/config/provider.js', () => ({ loadProjectConfigById: vi.fn(), })); -vi.mock('../../../src/pm/context.js', () => ({ - withPMCredentials: vi.fn().mockImplementation((_id, _type, _getter, fn) => fn()), - withPMProvider: vi.fn().mockImplementation((_provider, fn) => fn()), -})); - -vi.mock('../../../src/pm/index.js', () => ({ - createPMProvider: vi.fn().mockReturnValue({}), - pmRegistry: { getOrNull: vi.fn().mockReturnValue(null) }, -})); - vi.mock('../../../src/utils/lifecycle.js', () => ({ startWatchdog: vi.fn(), })); @@ -25,10 +15,25 @@ vi.mock('../../../src/triggers/shared/agent-execution.js', () => ({ runAgentExecutionPipeline: vi.fn().mockResolvedValue(undefined), })); +// Mock shared utilities used by processSentryWebhook +vi.mock('../../../src/triggers/shared/concurrency.js', () => ({ + withAgentTypeConcurrency: vi.fn().mockImplementation((_projectId, _agentType, fn) => fn()), +})); + +vi.mock('../../../src/triggers/shared/credential-scope.js', () => ({ + withPMScope: vi.fn().mockImplementation((_project, fn) => fn()), +})); + +vi.mock('../../../src/triggers/shared/trigger-resolution.js', () => ({ + resolveTriggerResult: vi.fn(), +})); + import { loadProjectConfigById } from '../../../src/config/provider.js'; -import { withPMCredentials, withPMProvider } from '../../../src/pm/context.js'; import { processSentryWebhook } from '../../../src/triggers/sentry/webhook-handler.js'; import { runAgentExecutionPipeline } from '../../../src/triggers/shared/agent-execution.js'; +import { withAgentTypeConcurrency } from '../../../src/triggers/shared/concurrency.js'; +import { withPMScope } from '../../../src/triggers/shared/credential-scope.js'; +import { resolveTriggerResult } from '../../../src/triggers/shared/trigger-resolution.js'; import { createMockProject } from '../../helpers/factories.js'; const mockProject = createMockProject({ id: 'proj-sentry' }); @@ -45,22 +50,29 @@ describe('processSentryWebhook', () => { }); 
vi.mocked(runAgentExecutionPipeline).mockResolvedValue(undefined); // Re-apply pass-through implementations after resetAllMocks clears them - vi.mocked(withPMCredentials).mockImplementation((_id, _type, _getter, fn) => fn()); - vi.mocked(withPMProvider).mockImplementation((_provider, fn) => fn()); + vi.mocked(withAgentTypeConcurrency).mockImplementation((_projectId, _agentType, fn) => + fn().then(() => true), + ); + vi.mocked(withPMScope).mockImplementation((_project, fn) => fn()); + // resolveTriggerResult defaults to null (no trigger matched) + vi.mocked(resolveTriggerResult).mockResolvedValue(null); }); - it('loads project config by projectId and dispatches with sentry source when no triggerResult', async () => { + it('loads project config by projectId and calls resolveTriggerResult with sentry source', async () => { const payload = { resource: 'event_alert', cascadeProjectId: 'proj-sentry' }; await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never, undefined); expect(loadProjectConfigById).toHaveBeenCalledWith('proj-sentry'); - expect(mockRegistry.dispatch).toHaveBeenCalledWith( + expect(resolveTriggerResult).toHaveBeenCalledWith( + mockRegistry, expect.objectContaining({ source: 'sentry', payload, project: mockProject, }), + undefined, + 'processSentryWebhook', ); }); @@ -69,13 +81,14 @@ describe('processSentryWebhook', () => { await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never); - const dispatchCall = mockRegistry.dispatch.mock.calls[0][0]; - expect(dispatchCall.source).toBe('sentry'); - expect(dispatchCall.payload).toBe(payload); - expect(dispatchCall.project).toBe(mockProject); + const resolveCall = vi.mocked(resolveTriggerResult).mock.calls[0]; + const ctx = resolveCall[1]; + expect(ctx.source).toBe('sentry'); + expect(ctx.payload).toBe(payload); + expect(ctx.project).toBe(mockProject); }); - it('logs a warning and returns without dispatching when project is not found', async () => { + it('logs a warning and returns 
without calling resolveTriggerResult when project is not found', async () => { vi.mocked(loadProjectConfigById).mockResolvedValue(undefined); const payload = { resource: 'event_alert' }; @@ -85,26 +98,33 @@ describe('processSentryWebhook', () => { expect.stringContaining('project not found'), expect.objectContaining({ projectId: 'unknown-proj' }), ); - expect(mockRegistry.dispatch).not.toHaveBeenCalled(); + expect(resolveTriggerResult).not.toHaveBeenCalled(); }); - it('does NOT call registry.dispatch when triggerResult is provided', async () => { + it('passes triggerResult to resolveTriggerResult when provided', async () => { const payload = { resource: 'event_alert', cascadeProjectId: 'proj-sentry' }; const triggerResult = { agentType: 'alerting', agentInput: {} } as never; await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never, triggerResult); - expect(mockRegistry.dispatch).not.toHaveBeenCalled(); + expect(resolveTriggerResult).toHaveBeenCalledWith( + mockRegistry, + expect.any(Object), + triggerResult, + 'processSentryWebhook', + ); }); - it('logs info message when triggerResult is provided', async () => { + it('logs info message when triggerResult is provided (via resolveTriggerResult)', async () => { const payload = { resource: 'event_alert' }; const triggerResult = { agentType: 'alerting', agentInput: {} } as never; + vi.mocked(resolveTriggerResult).mockResolvedValue(triggerResult); await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never, triggerResult); + // processSentryWebhook logs "running agent" when it proceeds after resolution expect(mockLogger.info).toHaveBeenCalledWith( - expect.stringContaining('pre-computed trigger result'), + expect.stringContaining('running agent'), expect.objectContaining({ projectId: 'proj-sentry', agentType: 'alerting' }), ); }); @@ -112,6 +132,7 @@ describe('processSentryWebhook', () => { it('runs the agent execution pipeline when triggerResult has an agentType', async () => { const payload 
= { resource: 'event_alert', cascadeProjectId: 'proj-sentry' }; const triggerResult = { agentType: 'alerting', agentInput: {} } as never; + vi.mocked(resolveTriggerResult).mockResolvedValue(triggerResult); await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never, triggerResult); @@ -123,12 +144,38 @@ describe('processSentryWebhook', () => { ); }); - it('does not run the agent when registry dispatch returns null', async () => { + it('does not run the agent when resolveTriggerResult returns null', async () => { const payload = { resource: 'event_alert', cascadeProjectId: 'proj-sentry' }; - mockRegistry.dispatch.mockResolvedValue(null); + vi.mocked(resolveTriggerResult).mockResolvedValue(null); await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never); expect(runAgentExecutionPipeline).not.toHaveBeenCalled(); }); + + it('applies agent-type concurrency when running the agent', async () => { + const payload = { resource: 'event_alert' }; + const triggerResult = { agentType: 'alerting', agentInput: {} } as never; + vi.mocked(resolveTriggerResult).mockResolvedValue(triggerResult); + + await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never, triggerResult); + + expect(withAgentTypeConcurrency).toHaveBeenCalledWith( + 'proj-sentry', + 'alerting', + expect.any(Function), + 'processSentryWebhook', + ); + }); + + it('skips execution when concurrency is blocked', async () => { + const payload = { resource: 'event_alert' }; + const triggerResult = { agentType: 'alerting', agentInput: {} } as never; + vi.mocked(resolveTriggerResult).mockResolvedValue(triggerResult); + vi.mocked(withAgentTypeConcurrency).mockResolvedValue(false); + + await processSentryWebhook(payload, 'proj-sentry', mockRegistry as never, triggerResult); + + expect(runAgentExecutionPipeline).not.toHaveBeenCalled(); + }); }); diff --git a/tests/unit/triggers/shared/concurrency.test.ts b/tests/unit/triggers/shared/concurrency.test.ts new file mode 100644 index 
00000000..63a095c6 --- /dev/null +++ b/tests/unit/triggers/shared/concurrency.test.ts @@ -0,0 +1,145 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// --------------------------------------------------------------------------- +// Hoisted mocks — declared before imports so module-level singletons are mocked +// --------------------------------------------------------------------------- + +const { + mockCheckAgentTypeConcurrency, + mockMarkAgentTypeEnqueued, + mockClearAgentTypeEnqueued, + mockMarkRecentlyDispatched, +} = vi.hoisted(() => ({ + mockCheckAgentTypeConcurrency: vi.fn(), + mockMarkAgentTypeEnqueued: vi.fn(), + mockClearAgentTypeEnqueued: vi.fn(), + mockMarkRecentlyDispatched: vi.fn(), +})); + +vi.mock('../../../../src/router/agent-type-lock.js', () => ({ + checkAgentTypeConcurrency: mockCheckAgentTypeConcurrency, + markAgentTypeEnqueued: mockMarkAgentTypeEnqueued, + clearAgentTypeEnqueued: mockClearAgentTypeEnqueued, + markRecentlyDispatched: mockMarkRecentlyDispatched, +})); + +vi.mock('../../../../src/utils/logging.js', () => ({ + logger: { + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }, +})); + +import { withAgentTypeConcurrency } from '../../../../src/triggers/shared/concurrency.js'; + +// --------------------------------------------------------------------------- +// Fixtures +// --------------------------------------------------------------------------- + +const PROJECT_ID = 'project-1'; +const AGENT_TYPE = 'implementation'; + +beforeEach(() => { + vi.clearAllMocks(); + // Default: no limit, not blocked + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: null, blocked: false }); +}); + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('withAgentTypeConcurrency', () => { + it('calls fn and returns true when not blocked and no limit', async () => { + const 
fn = vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: null, blocked: false }); + + const result = await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(result).toBe(true); + expect(fn).toHaveBeenCalledOnce(); + }); + + it('returns false and does not call fn when blocked', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: 1, blocked: true }); + + const result = await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(result).toBe(false); + expect(fn).not.toHaveBeenCalled(); + }); + + it('marks enqueued and dispatched when limit is set (not null)', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: 2, blocked: false }); + + await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(mockMarkRecentlyDispatched).toHaveBeenCalledWith(PROJECT_ID, AGENT_TYPE); + expect(mockMarkAgentTypeEnqueued).toHaveBeenCalledWith(PROJECT_ID, AGENT_TYPE); + }); + + it('does not mark enqueued when no limit (maxConcurrency null)', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: null, blocked: false }); + + await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(mockMarkRecentlyDispatched).not.toHaveBeenCalled(); + expect(mockMarkAgentTypeEnqueued).not.toHaveBeenCalled(); + }); + + it('clears enqueued slot in finally block when limit is set', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: 1, blocked: false }); + + await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(mockClearAgentTypeEnqueued).toHaveBeenCalledWith(PROJECT_ID, AGENT_TYPE); + }); + + it('does not clear enqueued slot when no limit', async () => { + const fn = 
vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: null, blocked: false }); + + await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(mockClearAgentTypeEnqueued).not.toHaveBeenCalled(); + }); + + it('clears enqueued slot even when fn throws', async () => { + const fn = vi.fn().mockRejectedValue(new Error('agent crashed')); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: 1, blocked: false }); + + await expect(withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn)).rejects.toThrow( + 'agent crashed', + ); + + expect(mockClearAgentTypeEnqueued).toHaveBeenCalledWith(PROJECT_ID, AGENT_TYPE); + }); + + it('does not clear enqueued slot when blocked (fn never ran)', async () => { + const fn = vi.fn(); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: 1, blocked: true }); + + await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn); + + expect(mockClearAgentTypeEnqueued).not.toHaveBeenCalled(); + }); + + it('passes logLabel to checkAgentTypeConcurrency', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + mockCheckAgentTypeConcurrency.mockResolvedValue({ maxConcurrency: null, blocked: false }); + + await withAgentTypeConcurrency(PROJECT_ID, AGENT_TYPE, fn, 'My handler'); + + expect(mockCheckAgentTypeConcurrency).toHaveBeenCalledWith( + PROJECT_ID, + AGENT_TYPE, + 'My handler', + ); + }); +}); diff --git a/tests/unit/triggers/shared/credential-scope.test.ts b/tests/unit/triggers/shared/credential-scope.test.ts new file mode 100644 index 00000000..d09ef825 --- /dev/null +++ b/tests/unit/triggers/shared/credential-scope.test.ts @@ -0,0 +1,122 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// --------------------------------------------------------------------------- +// Hoisted mocks +// --------------------------------------------------------------------------- + +const { mockWithPMCredentials, mockWithPMProvider, 
mockCreatePMProvider, mockGetOrNull } = + vi.hoisted(() => { + return { + mockWithPMCredentials: vi.fn().mockImplementation((_id, _type, _getter, fn) => fn()), + mockWithPMProvider: vi.fn().mockImplementation((_provider, fn) => fn()), + mockCreatePMProvider: vi.fn().mockReturnValue({ type: 'trello' }), + mockGetOrNull: vi.fn().mockReturnValue(null), + }; + }); + +vi.mock('../../../../src/pm/context.js', () => ({ + withPMCredentials: mockWithPMCredentials, + withPMProvider: mockWithPMProvider, +})); + +vi.mock('../../../../src/pm/index.js', () => ({ + createPMProvider: mockCreatePMProvider, + pmRegistry: { getOrNull: mockGetOrNull }, +})); + +import { withPMScope } from '../../../../src/triggers/shared/credential-scope.js'; +import type { ProjectConfig } from '../../../../src/types/index.js'; + +// --------------------------------------------------------------------------- +// Fixtures +// --------------------------------------------------------------------------- + +const mockProject = { + id: 'project-1', + name: 'Test', + repo: 'owner/repo', + baseBranch: 'main', + pm: { type: 'trello' }, +} as ProjectConfig; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('withPMScope', () => { + beforeEach(() => { + vi.clearAllMocks(); + mockWithPMCredentials.mockImplementation( + (_id: unknown, _type: unknown, _getter: unknown, fn: () => Promise) => fn(), + ); + mockWithPMProvider.mockImplementation((_provider: unknown, fn: () => Promise) => fn()); + mockCreatePMProvider.mockReturnValue({ type: 'trello' }); + }); + + it('calls fn and returns its result', async () => { + const fn = vi.fn().mockResolvedValue('result'); + + const result = await withPMScope(mockProject, fn); + + expect(result).toBe('result'); + expect(fn).toHaveBeenCalledOnce(); + }); + + it('creates a PM provider using the project', async () => { + const fn = 
vi.fn().mockResolvedValue(undefined); + + await withPMScope(mockProject, fn); + + expect(mockCreatePMProvider).toHaveBeenCalledWith(mockProject); + }); + + it('calls withPMCredentials with project id and pm type', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + + await withPMScope(mockProject, fn); + + expect(mockWithPMCredentials).toHaveBeenCalledWith( + 'project-1', + 'trello', + expect.any(Function), + expect.any(Function), + ); + }); + + it('calls withPMProvider with the created PM provider', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + const mockProvider = { type: 'trello' }; + mockCreatePMProvider.mockReturnValue(mockProvider); + + await withPMScope(mockProject, fn); + + expect(mockWithPMProvider).toHaveBeenCalledWith(mockProvider, expect.any(Function)); + }); + + it('works when project has no PM type', async () => { + const projectWithoutPM = { ...mockProject, pm: undefined } as ProjectConfig; + const fn = vi.fn().mockResolvedValue('value'); + + const result = await withPMScope(projectWithoutPM, fn); + + expect(result).toBe('value'); + // withPMCredentials is still called (falls through to fn() when pmType is undefined) + expect(mockWithPMCredentials).toHaveBeenCalledWith( + 'project-1', + undefined, + expect.any(Function), + expect.any(Function), + ); + }); + + it('uses pmRegistry.getOrNull as the integration getter', async () => { + const fn = vi.fn().mockResolvedValue(undefined); + + await withPMScope(mockProject, fn); + + // The getter function passed to withPMCredentials calls pmRegistry.getOrNull + const getter = mockWithPMCredentials.mock.calls[0][2]; + getter('trello'); + expect(mockGetOrNull).toHaveBeenCalledWith('trello'); + }); +}); diff --git a/tests/unit/triggers/shared/trigger-resolution.test.ts b/tests/unit/triggers/shared/trigger-resolution.test.ts new file mode 100644 index 00000000..ffa5ff6b --- /dev/null +++ b/tests/unit/triggers/shared/trigger-resolution.test.ts @@ -0,0 +1,130 @@ +import { 
beforeEach, describe, expect, it, vi } from 'vitest'; + +vi.mock('../../../../src/utils/logging.js', () => ({ + logger: { + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }, +})); + +import { resolveTriggerResult } from '../../../../src/triggers/shared/trigger-resolution.js'; +import type { TriggerContext, TriggerResult } from '../../../../src/types/index.js'; +import { logger } from '../../../../src/utils/logging.js'; + +// --------------------------------------------------------------------------- +// Fixtures +// --------------------------------------------------------------------------- + +const mockProject = { + id: 'project-1', + name: 'Test', + repo: 'owner/repo', + baseBranch: 'main', +} as Parameters[1]['project']; + +const ctx: TriggerContext = { + project: mockProject, + source: 'github', + payload: { some: 'data' }, +}; + +const triggerResult: TriggerResult = { + agentType: 'implementation', + agentInput: { repoFullName: 'owner/repo' }, +}; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('resolveTriggerResult', () => { + let mockRegistry: { dispatch: ReturnType }; + + beforeEach(() => { + vi.clearAllMocks(); + mockRegistry = { dispatch: vi.fn().mockResolvedValue(null) }; + }); + + it('returns preResolvedResult without dispatching when provided', async () => { + const result = await resolveTriggerResult(mockRegistry as never, ctx, triggerResult); + + expect(result).toBe(triggerResult); + expect(mockRegistry.dispatch).not.toHaveBeenCalled(); + }); + + it('logs info message with agentType when preResolvedResult is provided', async () => { + await resolveTriggerResult(mockRegistry as never, ctx, triggerResult, 'MyHandler'); + + expect(vi.mocked(logger.info)).toHaveBeenCalledWith( + expect.stringContaining('pre-resolved trigger result'), + expect.objectContaining({ agentType: 'implementation' }), + ); 
+ }); + + it('includes logLabel in log message when provided', async () => { + await resolveTriggerResult(mockRegistry as never, ctx, triggerResult, 'MyCustomLabel'); + + expect(vi.mocked(logger.info)).toHaveBeenCalledWith( + expect.stringContaining('MyCustomLabel'), + expect.any(Object), + ); + }); + + it('falls back to ctx.source in log message when no logLabel', async () => { + await resolveTriggerResult(mockRegistry as never, ctx, triggerResult); + + expect(vi.mocked(logger.info)).toHaveBeenCalledWith( + expect.stringContaining('github'), + expect.any(Object), + ); + }); + + it('dispatches to registry when no preResolvedResult', async () => { + mockRegistry.dispatch.mockResolvedValue(triggerResult); + + const result = await resolveTriggerResult(mockRegistry as never, ctx); + + expect(mockRegistry.dispatch).toHaveBeenCalledWith(ctx); + expect(result).toBe(triggerResult); + }); + + it('returns null when registry dispatch returns null', async () => { + mockRegistry.dispatch.mockResolvedValue(null); + + const result = await resolveTriggerResult(mockRegistry as never, ctx); + + expect(result).toBeNull(); + }); + + it('logs info when no trigger matched (dispatch returns null)', async () => { + mockRegistry.dispatch.mockResolvedValue(null); + + await resolveTriggerResult(mockRegistry as never, ctx, undefined, 'TestHandler'); + + expect(vi.mocked(logger.info)).toHaveBeenCalledWith( + expect.stringContaining('no trigger matched'), + ); + }); + + it('does not log "no trigger matched" when dispatch returns a result', async () => { + mockRegistry.dispatch.mockResolvedValue(triggerResult); + + await resolveTriggerResult(mockRegistry as never, ctx); + + const infoCall = vi + .mocked(logger.info) + .mock.calls.find((call) => String(call[0]).includes('no trigger matched')); + expect(infoCall).toBeUndefined(); + }); + + it('passes undefined preResolvedResult and dispatches', async () => { + mockRegistry.dispatch.mockResolvedValue(triggerResult); + + const result = await 
resolveTriggerResult(mockRegistry as never, ctx, undefined); + + expect(mockRegistry.dispatch).toHaveBeenCalledOnce(); + expect(result).toBe(triggerResult); + }); +}); From 1420e20bb7571049ab0462f703d0787ac86cb7b1 Mon Sep 17 00:00:00 2001 From: aaight Date: Fri, 3 Apr 2026 19:53:53 +0200 Subject: [PATCH 14/52] refactor: replace hardcoded agentType === 'review' checks with profile-driven alternatives (#1082) Co-authored-by: Cascade Bot --- src/agents/utils/tracking.ts | 32 ++++++++++--- src/backends/llmist/index.ts | 5 +- src/backends/sidecarManager.ts | 9 ++-- src/github/personas.ts | 14 ++++++ src/router/github-token-resolver.ts | 4 +- tests/unit/agents/utils/tracking.test.ts | 47 +++++++++++++++++++ tests/unit/backends/adapter.test.ts | 9 ++++ tests/unit/backends/llmist.test.ts | 2 + tests/unit/backends/sidecarManager.test.ts | 36 +++++++++++++- .../unit/router/github-token-resolver.test.ts | 35 ++++++++++++++ 10 files changed, 178 insertions(+), 15 deletions(-) diff --git a/src/agents/utils/tracking.ts b/src/agents/utils/tracking.ts index 21709695..c8e2ced1 100644 --- a/src/agents/utils/tracking.ts +++ b/src/agents/utils/tracking.ts @@ -72,6 +72,12 @@ export interface TrackingContext { loopDetection: LoopDetectionState; /** Agent type for role-aware loop messages */ agentType?: string; + /** + * Loop advice profile to use for role-aware messages. + * When set, takes precedence over the agentType-based lookup. + * Derived from `profile.finishHooks.requiresReview`. + */ + loopAdviceProfile?: 'review' | 'default'; } /** @@ -92,12 +98,16 @@ export function createLoopDetectionState(): LoopDetectionState { /** * Create a new tracking context with zero metrics. 
*/ -export function createTrackingContext(agentType?: string): TrackingContext { +export function createTrackingContext( + agentType?: string, + loopAdviceProfile?: 'review' | 'default', +): TrackingContext { return { metrics: { llmIterations: 0, gadgetCalls: 0 }, syntheticInvocationIds: new Set(), loopDetection: createLoopDetectionState(), agentType, + loopAdviceProfile, }; } @@ -215,8 +225,13 @@ const LOOP_ADVICE = { }, } as const; -function getAdvice(agentType?: string): (typeof LOOP_ADVICE)[keyof typeof LOOP_ADVICE] { - return agentType === 'review' ? LOOP_ADVICE.review : LOOP_ADVICE.default; +function getAdvice( + agentType?: string, + loopAdviceProfile?: 'review' | 'default', +): (typeof LOOP_ADVICE)[keyof typeof LOOP_ADVICE] { + const isReview = + loopAdviceProfile !== undefined ? loopAdviceProfile === 'review' : agentType === 'review'; + return isReview ? LOOP_ADVICE.review : LOOP_ADVICE.default; } /** @@ -236,13 +251,14 @@ function generateLoopWarning( repeatCount: number, repeatedPattern: string, agentType?: string, + loopAdviceProfile?: 'review' | 'default', ): string { const urgency = repeatCount >= 3 ? '🚨' : '⚠️'; return `[System] ${urgency} LOOP DETECTED (×${repeatCount}) Pattern: ${repeatedPattern} -${getAdvice(agentType).exact}`; +${getAdvice(agentType, loopAdviceProfile).exact}`; } /** @@ -252,9 +268,11 @@ function generateNameOnlyLoopAction( repeatCount: number, pattern: string, agentType?: string, + loopAdviceProfile?: 'review' | 'default', ): LoopAction | null { - const isReview = agentType === 'review'; - const advice = getAdvice(agentType); + const isReview = + loopAdviceProfile !== undefined ? 
loopAdviceProfile === 'review' : agentType === 'review'; + const advice = getAdvice(agentType, loopAdviceProfile); if (repeatCount >= LOOP_THRESHOLDS.HARD_STOP) { return { @@ -324,6 +342,7 @@ export function checkForLoopAndAdvance(context: TrackingContext): boolean { state.repeatCount, state.repeatedPattern, context.agentType, + context.loopAdviceProfile, ); } else { state.repeatCount = 1; @@ -346,6 +365,7 @@ export function checkForLoopAndAdvance(context: TrackingContext): boolean { state.nameOnlyRepeatCount, formatCallsForDisplay(state.currentIterationCalls), context.agentType, + context.loopAdviceProfile, ); } else { state.nameOnlyRepeatCount = 1; diff --git a/src/backends/llmist/index.ts b/src/backends/llmist/index.ts index 8db7ab48..e078c42a 100644 --- a/src/backends/llmist/index.ts +++ b/src/backends/llmist/index.ts @@ -65,7 +65,10 @@ export class LlmistEngine implements AgentEngine { // Create per-execution llmist logger and tracking state const llmistLogger = createLogger({ minLevel: getLogLevel() }); - const trackingContext = createTrackingContext(agentType); + const trackingContext = createTrackingContext( + agentType, + profile.finishHooks.requiresReview ? 'review' : 'default', + ); const llmCallAccumulator: AccumulatedLlmCall[] = []; // Create a LLM call logger for raw request/response file logging. diff --git a/src/backends/sidecarManager.ts b/src/backends/sidecarManager.ts index 3fd21798..0541369f 100644 --- a/src/backends/sidecarManager.ts +++ b/src/backends/sidecarManager.ts @@ -23,7 +23,7 @@ import type { AgentEngineResult } from './types.js'; */ export function createCompletionArtifacts( profile: Awaited>, - agentType: string, + _agentType: string, needsNativeToolRuntime: boolean, input: AgentInput, projectSecrets: Record, @@ -33,10 +33,9 @@ export function createCompletionArtifacts( reviewSidecarPath: string | undefined; pmWriteSidecarPath: string | undefined; } { - const reviewSidecarPath = - agentType === 'review' - ? 
join(tmpdir(), `cascade-review-sidecar-${process.pid}-${Date.now()}.json`) - : undefined; + const reviewSidecarPath = profile.finishHooks.requiresReview + ? join(tmpdir(), `cascade-review-sidecar-${process.pid}-${Date.now()}.json`) + : undefined; if (reviewSidecarPath) { projectSecrets[REVIEW_SIDECAR_ENV_VAR] = reviewSidecarPath; } diff --git a/src/github/personas.ts b/src/github/personas.ts index 588acbf3..45496f8e 100644 --- a/src/github/personas.ts +++ b/src/github/personas.ts @@ -17,6 +17,20 @@ export interface PersonaIdentities { // Agent → Persona Mapping // ============================================================================ +/** + * Maps agent types to their GitHub personas. + * + * This is the canonical registration point for agent persona assignments. + * - `'implementer'` — uses the implementer GitHub token for all SCM operations + * - `'reviewer'` — uses the reviewer GitHub token, appropriate for agents + * that submit PR reviews (e.g. the built-in `review` agent) + * + * To add a custom agent with reviewer behaviour, add an entry here: + * ```ts + * 'my-custom-reviewer': 'reviewer', + * ``` + * Any agent type not listed here defaults to `'implementer'`. 
+ */ const AGENT_PERSONA_MAP: Record = { splitting: 'implementer', planning: 'implementer', diff --git a/src/router/github-token-resolver.ts b/src/router/github-token-resolver.ts index 4866919a..6dc27f97 100644 --- a/src/router/github-token-resolver.ts +++ b/src/router/github-token-resolver.ts @@ -9,6 +9,7 @@ import { getProjectGitHubToken } from '../config/projects.js'; import { findProjectByRepo, getIntegrationCredential } from '../config/provider.js'; +import { getPersonaForAgentType } from '../github/personas.js'; import type { ProjectConfig } from '../types/index.js'; import { logger } from '../utils/logging.js'; @@ -57,7 +58,8 @@ export async function resolveGitHubTokenForAckByAgent( if (!resolvedProject) return null; try { - if (agentType === 'review') { + const persona = getPersonaForAgentType(agentType); + if (persona === 'reviewer') { const token = await getIntegrationCredential(resolvedProject.id, 'scm', 'reviewer_token'); return { token, project: resolvedProject }; } diff --git a/tests/unit/agents/utils/tracking.test.ts b/tests/unit/agents/utils/tracking.test.ts index aebb1a25..e9e1c5d0 100644 --- a/tests/unit/agents/utils/tracking.test.ts +++ b/tests/unit/agents/utils/tracking.test.ts @@ -519,4 +519,51 @@ describe('loop detection', () => { expect(action?.message).toContain('delete the failing test'); }); }); + + describe('loopAdviceProfile overrides agentType', () => { + it('uses review loop advice when loopAdviceProfile is "review" and agentType is not "review"', () => { + const ctx = createTrackingContext('implementation', 'review'); + + // Create exact-match loop to trigger warning + recordGadgetCallForLoop(ctx, 'ReadFile', { filePath: '/foo.ts' }); + checkForLoopAndAdvance(ctx); + recordGadgetCallForLoop(ctx, 'ReadFile', { filePath: '/foo.ts' }); + checkForLoopAndAdvance(ctx); + + const warning = consumeLoopWarning(ctx); + expect(warning).toContain('CreatePRReview'); + expect(warning).not.toContain('COMPLETELY DIFFERENT APPROACH'); + }); + + 
it('uses default loop advice when loopAdviceProfile is "default" and agentType is "review"', () => { + const ctx = createTrackingContext('review', 'default'); + + // Create exact-match loop to trigger warning + recordGadgetCallForLoop(ctx, 'ReadFile', { filePath: '/foo.ts' }); + checkForLoopAndAdvance(ctx); + recordGadgetCallForLoop(ctx, 'ReadFile', { filePath: '/foo.ts' }); + checkForLoopAndAdvance(ctx); + + const warning = consumeLoopWarning(ctx); + expect(warning).toContain('COMPLETELY DIFFERENT APPROACH'); + expect(warning).not.toContain('CreatePRReview'); + }); + + it('uses review name-only loop action when loopAdviceProfile is "review" and agentType is not "review"', () => { + const ctx = createTrackingContext('implementation', 'review'); + + for (let i = 0; i < LOOP_THRESHOLDS.WARNING; i++) { + recordGadgetCallForLoop(ctx, 'FileSearchAndReplace', { + filePath: '/foo.ts', + search: `v${i}`, + }); + checkForLoopAndAdvance(ctx); + } + + const action = consumeLoopAction(ctx); + expect(action).not.toBeNull(); + expect(action?.message).toContain('CreatePRReview'); + expect(action?.message).not.toContain('delete the failing test'); + }); + }); }); diff --git a/tests/unit/backends/adapter.test.ts b/tests/unit/backends/adapter.test.ts index cef03809..420bb3fc 100644 --- a/tests/unit/backends/adapter.test.ts +++ b/tests/unit/backends/adapter.test.ts @@ -854,6 +854,9 @@ describe('executeWithEngine', () => { it('calls recordReviewSubmission when sidecar exists for review agent', async () => { setupMocks(); + mockGetAgentProfile.mockReturnValue( + makeMockProfile({ finishHooks: { requiresReview: true } }), + ); const engine = makeMockBackend(); writeSidecarAtInjectedPath(engine, { reviewUrl: 'https://github.com/o/r/pull/1#pullrequestreview-99', @@ -873,6 +876,9 @@ describe('executeWithEngine', () => { it('injects CASCADE_REVIEW_SIDECAR_PATH into projectSecrets for review agent', async () => { setupMocks(); + mockGetAgentProfile.mockReturnValue( + makeMockProfile({ 
finishHooks: { requiresReview: true } }), + ); const engine = makeMockBackend(); const input = makeInput(); @@ -951,6 +957,9 @@ describe('executeWithEngine', () => { it('clears initialCommentId when sidecar has ackCommentDeleted: true', async () => { setupMocks(); + mockGetAgentProfile.mockReturnValue( + makeMockProfile({ finishHooks: { requiresReview: true } }), + ); const engine = makeMockBackend(); writeSidecarAtInjectedPath(engine, { reviewUrl: 'https://github.com/o/r/pull/1#pullrequestreview-42', diff --git a/tests/unit/backends/llmist.test.ts b/tests/unit/backends/llmist.test.ts index 05153693..1e2ecc08 100644 --- a/tests/unit/backends/llmist.test.ts +++ b/tests/unit/backends/llmist.test.ts @@ -22,6 +22,7 @@ vi.mock('../../../src/agents/definitions/index.js', () => ({ vi.mock('../../../src/agents/definitions/profiles.js', () => ({ getAgentProfile: vi.fn(() => ({ getLlmistGadgets: vi.fn(() => []), + finishHooks: {}, })), })); @@ -363,6 +364,7 @@ describe('LlmistEngine.execute', () => { const mockGetLlmistGadgets = vi.fn().mockReturnValue([]); mockGetAgentProfile.mockReturnValue({ getLlmistGadgets: mockGetLlmistGadgets, + finishHooks: {}, } as ReturnType); const engine = new LlmistEngine(); diff --git a/tests/unit/backends/sidecarManager.test.ts b/tests/unit/backends/sidecarManager.test.ts index 8a3bfcd9..2bd4ac5f 100644 --- a/tests/unit/backends/sidecarManager.test.ts +++ b/tests/unit/backends/sidecarManager.test.ts @@ -59,8 +59,8 @@ function makeSidecarPath(name: string): string { } describe('createCompletionArtifacts', () => { - it('creates a review sidecar path for review agent type', () => { - const profile = makeProfile(); + it('creates a review sidecar path when profile.finishHooks.requiresReview is true', () => { + const profile = makeProfile({ finishHooks: { requiresReview: true } }); const projectSecrets: Record = {}; const result = createCompletionArtifacts( @@ -91,6 +91,38 @@ describe('createCompletionArtifacts', () => { 
expect(projectSecrets.CASCADE_REVIEW_SIDECAR_PATH).toBeUndefined(); }); + it('creates review sidecar for custom agent with requiresReview: true', () => { + const profile = makeProfile({ finishHooks: { requiresReview: true } }); + const projectSecrets: Record = {}; + + const result = createCompletionArtifacts( + profile, + 'custom-reviewer', + false, + {} as AgentInput, + projectSecrets, + ); + + expect(result.reviewSidecarPath).toMatch(/cascade-review-sidecar-\d+-\d+\.json$/); + expect(projectSecrets.CASCADE_REVIEW_SIDECAR_PATH).toBe(result.reviewSidecarPath); + }); + + it('does not create review sidecar when requiresReview is not set', () => { + const profile = makeProfile({ finishHooks: {} }); + const projectSecrets: Record = {}; + + const result = createCompletionArtifacts( + profile, + 'implementation', + false, + {} as AgentInput, + projectSecrets, + ); + + expect(result.reviewSidecarPath).toBeUndefined(); + expect(projectSecrets.CASCADE_REVIEW_SIDECAR_PATH).toBeUndefined(); + }); + it('creates a PR sidecar path when requiresPR and needsNativeToolRuntime', () => { const profile = makeProfile({ finishHooks: { requiresPR: true } }); const projectSecrets: Record = {}; diff --git a/tests/unit/router/github-token-resolver.test.ts b/tests/unit/router/github-token-resolver.test.ts index 6c9260b8..29e39aa2 100644 --- a/tests/unit/router/github-token-resolver.test.ts +++ b/tests/unit/router/github-token-resolver.test.ts @@ -11,6 +11,13 @@ vi.mock('../../../src/config/projects.js', () => ({ getProjectGitHubToken: vi.fn(), })); +// Mock getPersonaForAgentType for persona-based token selection tests +vi.mock('../../../src/github/personas.js', () => ({ + getPersonaForAgentType: vi.fn((agentType: string) => + agentType === 'review' ? 
'reviewer' : 'implementer', + ), +})); + // Mock config cache (imported transitively) vi.mock('../../../src/config/configCache.js', () => ({ configCache: { @@ -36,11 +43,14 @@ vi.mock('../../../src/utils/logging.js', () => ({ import { getProjectGitHubToken } from '../../../src/config/projects.js'; import { findProjectByRepo, getIntegrationCredential } from '../../../src/config/provider.js'; +import { getPersonaForAgentType } from '../../../src/github/personas.js'; import { resolveGitHubTokenForAck, resolveGitHubTokenForAckByAgent, } from '../../../src/router/github-token-resolver.js'; +const mockGetPersonaForAgentType = vi.mocked(getPersonaForAgentType); + const mockGetIntegrationCredential = vi.mocked(getIntegrationCredential); const mockGetProjectGitHubToken = vi.mocked(getProjectGitHubToken); const mockFindProjectByRepo = vi.mocked(findProjectByRepo); @@ -184,4 +194,29 @@ describe('resolveGitHubTokenForAckByAgent', () => { expect(mockFindProjectByRepo).not.toHaveBeenCalled(); expect(mockGetProjectGitHubToken).toHaveBeenCalledWith(preResolvedProject); }); + + it('delegates to getPersonaForAgentType for reviewer token selection', async () => { + mockGetPersonaForAgentType.mockReturnValueOnce('reviewer'); + mockGetIntegrationCredential.mockImplementation(async (_projectId, category, role) => { + if (category === 'scm' && role === 'reviewer_token') return 'custom-reviewer-token'; + throw new Error(`Credential '${category}/${role}' not found`); + }); + + const result = await resolveGitHubTokenForAckByAgent('owner/repo', 'custom-review-agent'); + + expect(result).not.toBeNull(); + expect(result?.token).toBe('custom-reviewer-token'); + expect(mockGetPersonaForAgentType).toHaveBeenCalledWith('custom-review-agent'); + }); + + it('delegates to getPersonaForAgentType for implementer token selection', async () => { + mockGetPersonaForAgentType.mockReturnValueOnce('implementer'); + + const result = await resolveGitHubTokenForAckByAgent('owner/repo', 'custom-impl-agent'); + + 
expect(result).not.toBeNull(); + expect(result?.token).toBe('test-github-token'); + expect(mockGetPersonaForAgentType).toHaveBeenCalledWith('custom-impl-agent'); + expect(mockGetProjectGitHubToken).toHaveBeenCalled(); + }); }); From b9477fd54d466e25074d752922043e449e492ff0 Mon Sep 17 00:00:00 2001 From: aaight Date: Sat, 4 Apr 2026 16:26:59 +0200 Subject: [PATCH 15/52] refactor(agents): migrate deprecated sync loader functions to non-deprecated alternatives (#1083) Co-authored-by: Cascade Bot --- src/agents/definitions/index.ts | 6 +- src/agents/definitions/loader.ts | 44 +++---- src/agents/prompts/index.ts | 11 +- src/api/routers/agentConfigs.ts | 4 +- src/api/routers/agentDefinitions.ts | 32 ++--- src/api/routers/agentTriggerConfigs.ts | 6 +- src/db/seeds/seedAgentDefinitions.ts | 6 +- .../agents/definitions/async-resolver.test.ts | 24 ++-- tests/unit/agents/definitions/loader.test.ts | 115 ++++++++---------- .../agents/shared/modelResolution.test.ts | 4 +- tests/unit/api/routers/agentConfigs.test.ts | 4 +- .../unit/api/routers/agentDefinitions.test.ts | 38 +++--- ...ggerConfigs.getProjectTriggersView.test.ts | 30 ++--- tests/unit/backends/llmist.test.ts | 2 +- 14 files changed, 153 insertions(+), 173 deletions(-) diff --git a/src/agents/definitions/index.ts b/src/agents/definitions/index.ts index 2e6ea670..c310a41e 100644 --- a/src/agents/definitions/index.ts +++ b/src/agents/definitions/index.ts @@ -19,11 +19,11 @@ export { export type { FetchContextParams } from './contextSteps.js'; export { clearDefinitionCache, - getKnownAgentTypes, + getBuiltinAgentTypes, invalidateDefinitionCache, + isBuiltinAgentType, isPMFocusedAgent, - loadAgentDefinition, - loadAllAgentDefinitions, + loadBuiltinDefinition, resolveAgentDefinition, resolveAllAgentDefinitions, resolveKnownAgentTypes, diff --git a/src/agents/definitions/loader.ts b/src/agents/definitions/loader.ts index dabeafff..73ec55db 100644 --- a/src/agents/definitions/loader.ts +++ b/src/agents/definitions/loader.ts 
@@ -18,12 +18,14 @@ const cache = new Map(); let knownTypes: string[] | null = null; /** - * Load and validate a single agent definition from YAML. + * Load and validate a single built-in agent definition from YAML. * Results are cached after first load. * - * @deprecated Use `resolveAgentDefinition()` instead (checks cache → DB → YAML). + * Use this in sync contexts that genuinely need YAML-only access (seed scripts, + * reset operations, internal fallbacks). For general use prefer `resolveAgentDefinition()` + * which checks the in-memory cache, then the database, then falls back to YAML. */ -export function loadAgentDefinition(agentType: string): AgentDefinition { +export function loadBuiltinDefinition(agentType: string): AgentDefinition { const cached = cache.get(agentType); if (cached) return cached; @@ -47,25 +49,13 @@ export function loadAgentDefinition(agentType: string): AgentDefinition { } /** - * Load all agent definitions discovered from YAML files in the definitions directory. + * Return the list of built-in agent types (derived from YAML filenames). * - * @deprecated Use `resolveAllAgentDefinitions()` instead (checks DB with YAML fallback). + * Use this in sync contexts that genuinely need YAML-only type enumeration. + * For general use prefer `resolveKnownAgentTypes()` which also includes + * custom types stored only in the database. */ -export function loadAllAgentDefinitions(): Map { - const types = getKnownAgentTypes(); - const result = new Map(); - for (const agentType of types) { - result.set(agentType, loadAgentDefinition(agentType)); - } - return result; -} - -/** - * Return the list of known agent types (derived from YAML filenames). - * - * @deprecated Use `resolveKnownAgentTypes()` instead (returns types from both DB and YAML). 
- */
-export function getKnownAgentTypes(): string[] {
+export function getBuiltinAgentTypes(): string[] {
   if (knownTypes) return knownTypes;
 
   const entries = readdirSync(__dirname);
@@ -86,11 +76,11 @@ export function clearDefinitionCache(): void {
 
 /**
  * Returns true if the given agentType has a backing YAML file (i.e. is a built-in type).
- * Wraps `getKnownAgentTypes().includes()` to avoid repeated deprecated-function calls at each
- * call site.
+ * Wraps `getBuiltinAgentTypes().includes()` to avoid repeating the type-list lookup at
+ * each call site.
  */
 export function isBuiltinAgentType(agentType: string): boolean {
-  return getKnownAgentTypes().includes(agentType);
+  return getBuiltinAgentTypes().includes(agentType);
 }
 
 // ============================================================================
@@ -123,7 +113,7 @@ export async function resolveAgentDefinition(agentType: string): Promise` covering all known agent types.
  */
 export async function resolveAllAgentDefinitions(): Promise> {
-  const yamlTypes = getKnownAgentTypes();
+  const yamlTypes = getBuiltinAgentTypes();
   const result = new Map();
 
   // Fetch all DB entries first
@@ -155,7 +145,7 @@ export async function resolveAllAgentDefinitions(): Promise {
-  const yamlTypes = new Set(getKnownAgentTypes());
+  const yamlTypes = new Set(getBuiltinAgentTypes());
 
   try {
     const { listAgentDefinitions } = await import(
diff --git a/src/agents/prompts/index.ts b/src/agents/prompts/index.ts
index 0c3cc73e..48a5d75d 100644
--- a/src/agents/prompts/index.ts
+++ b/src/agents/prompts/index.ts
@@ -3,8 +3,7 @@ import { dirname, join } from 'node:path';
 import { fileURLToPath } from 'node:url';
 import { Eta } from 'eta';
 
-import { resolveKnownAgentTypes } from '../definitions/index.js';
-import { loadAgentDefinition } from '../definitions/loader.js';
+import { resolveAgentDefinition, resolveKnownAgentTypes } from '../definitions/index.js';
 
 const __dirname = dirname(fileURLToPath(import.meta.url));
 const templatesDir =
join(__dirname, 'templates'); @@ -236,13 +235,13 @@ export function renderInlineTaskPrompt( } /** - * Returns the YAML-defined taskPrompt for an agent type (the factory default). - * Does not require initPrompts() — reads directly from YAML. + * Returns the taskPrompt for an agent type (the factory default). + * Checks the database (with YAML fallback) via `resolveAgentDefinition()`. * Returns null if the agent type is unknown or has no taskPrompt defined. */ -export function getDefaultTaskPrompt(agentType: string): string | null { +export async function getDefaultTaskPrompt(agentType: string): Promise { try { - const definition = loadAgentDefinition(agentType); + const definition = await resolveAgentDefinition(agentType); return definition.prompts.taskPrompt ?? null; } catch { return null; diff --git a/src/api/routers/agentConfigs.ts b/src/api/routers/agentConfigs.ts index 39c79586..b6f7cbfd 100644 --- a/src/api/routers/agentConfigs.ts +++ b/src/api/routers/agentConfigs.ts @@ -196,8 +196,8 @@ export const agentConfigsRouter = router({ // No .eta template on disk — skip gracefully } - // 4. YAML-defined task prompt (factory default) - const defaultTaskPrompt = getDefaultTaskPrompt(input.agentType); + // 4. 
Task prompt factory default (checks DB then YAML fallback) + const defaultTaskPrompt = await getDefaultTaskPrompt(input.agentType); return { projectSystemPrompt, diff --git a/src/api/routers/agentDefinitions.ts b/src/api/routers/agentDefinitions.ts index 3999b969..b4c646b4 100644 --- a/src/api/routers/agentDefinitions.ts +++ b/src/api/routers/agentDefinitions.ts @@ -2,10 +2,10 @@ import { TRPCError } from '@trpc/server'; import { z } from 'zod'; import { CAPABILITIES } from '../../agents/capabilities/index.js'; import { - getKnownAgentTypes, + getBuiltinAgentTypes, invalidateDefinitionCache, isBuiltinAgentType, - loadAgentDefinition, + loadBuiltinDefinition, resolveAgentDefinition, resolveKnownAgentTypes, } from '../../agents/definitions/loader.js'; @@ -60,10 +60,10 @@ export const agentDefinitionsRouter = router({ * resolveAllAgentDefinitions() which would issue its own redundant listAgentDefinitions() call. */ list: superAdminProcedure.query(async () => { - // Intentional: getKnownAgentTypes() (deprecated) is used here to enumerate YAML types - // for the merge loop below. resolveKnownAgentTypes() also hits the DB, which we already - // cover via listAgentDefinitions(); calling both would be redundant. - const yamlTypes = getKnownAgentTypes(); + // getBuiltinAgentTypes() enumerates YAML types for the merge loop below. + // resolveKnownAgentTypes() also hits the DB, which we already cover via + // listAgentDefinitions(); calling both would be redundant. 
+ const yamlTypes = getBuiltinAgentTypes(); const result: Array<{ agentType: string; definition: AgentDefinition; isBuiltin: boolean }> = []; @@ -84,15 +84,15 @@ export const agentDefinitionsRouter = router({ seen.add(row.agentType); } - // Fill in YAML-only types not present in DB - // Intentional: loadAgentDefinition() (deprecated) is used here because this is a - // synchronous fallback path — we already have the YAML type list and just need the - // raw definition content; the async resolveAgentDefinition() would add DB round-trips. + // Fill in YAML-only types not present in DB. + // loadBuiltinDefinition() is used here because this is a synchronous fallback path — + // we already have the YAML type list and just need the raw definition content; + // the async resolveAgentDefinition() would add unnecessary DB round-trips. for (const agentType of yamlTypes) { if (!seen.has(agentType)) { result.push({ agentType, - definition: loadAgentDefinition(agentType), + definition: loadBuiltinDefinition(agentType), isBuiltin: true, // YAML-only types are always builtin }); } @@ -232,11 +232,11 @@ export const agentDefinitionsRouter = router({ } // Re-read the YAML (bypass cache). - // Intentional: loadAgentDefinition() (deprecated) is used here because this endpoint - // explicitly needs the raw YAML definition — the purpose of reset is to bypass any DB - // override and restore the hard-coded YAML defaults. + // loadBuiltinDefinition() is used here because this endpoint explicitly needs the + // raw YAML definition — the purpose of reset is to bypass any DB override and + // restore the hard-coded YAML defaults. 
invalidateDefinitionCache(); - const yamlDefinition = loadAgentDefinition(input.agentType); + const yamlDefinition = loadBuiltinDefinition(input.agentType); await upsertAgentDefinition(input.agentType, yamlDefinition, true); invalidateDefinitionCache(); return { agentType: input.agentType }; @@ -311,7 +311,7 @@ export const agentDefinitionsRouter = router({ // Load YAML defaults and use its prompts section let yamlDefault: AgentDefinition; try { - yamlDefault = loadAgentDefinition(input.agentType); + yamlDefault = loadBuiltinDefinition(input.agentType); } catch { throw new TRPCError({ code: 'NOT_FOUND', diff --git a/src/api/routers/agentTriggerConfigs.ts b/src/api/routers/agentTriggerConfigs.ts index ae0f0823..0a9453fc 100644 --- a/src/api/routers/agentTriggerConfigs.ts +++ b/src/api/routers/agentTriggerConfigs.ts @@ -1,6 +1,6 @@ import { TRPCError } from '@trpc/server'; import { z } from 'zod'; -import { getKnownAgentTypes, loadAgentDefinition } from '../../agents/definitions/loader.js'; +import { getBuiltinAgentTypes, loadBuiltinDefinition } from '../../agents/definitions/loader.js'; import type { AgentDefinition, SupportedTrigger, @@ -220,7 +220,7 @@ export const agentTriggerConfigsRouter = router({ const enabledAgentTypes = new Set(projectAgentConfigs.map((c) => c.agentType)); // Build a combined list of definitions (DB + YAML) - const yamlTypes = getKnownAgentTypes(); + const yamlTypes = getBuiltinAgentTypes(); const definitions: Array<{ agentType: string; definition: AgentDefinition }> = []; const seen = new Set(); @@ -234,7 +234,7 @@ export const agentTriggerConfigsRouter = router({ for (const agentType of yamlTypes) { if (!seen.has(agentType)) { try { - definitions.push({ agentType, definition: loadAgentDefinition(agentType) }); + definitions.push({ agentType, definition: loadBuiltinDefinition(agentType) }); } catch (err) { logger.warn('Failed to load agent definition from YAML', { agentType, error: err }); } diff --git 
a/src/db/seeds/seedAgentDefinitions.ts b/src/db/seeds/seedAgentDefinitions.ts index f8abf911..7e6d92b1 100644 --- a/src/db/seeds/seedAgentDefinitions.ts +++ b/src/db/seeds/seedAgentDefinitions.ts @@ -8,17 +8,17 @@ * npx tsx src/db/seeds/seedAgentDefinitions.ts */ -import { getKnownAgentTypes, loadAgentDefinition } from '../../agents/definitions/loader.js'; +import { getBuiltinAgentTypes, loadBuiltinDefinition } from '../../agents/definitions/loader.js'; import { readTemplateFileSync } from '../../agents/prompts/index.js'; import { upsertAgentDefinition } from '../repositories/agentDefinitionsRepository.js'; export async function seedAgentDefinitions(): Promise { - const agentTypes = getKnownAgentTypes(); + const agentTypes = getBuiltinAgentTypes(); console.log(`Seeding ${agentTypes.length} agent definitions...`); for (const agentType of agentTypes) { - const definition = loadAgentDefinition(agentType); + const definition = loadBuiltinDefinition(agentType); const systemPrompt = readTemplateFileSync(agentType); const enriched = systemPrompt ? 
{ ...definition, prompts: { ...definition.prompts, systemPrompt } } diff --git a/tests/unit/agents/definitions/async-resolver.test.ts b/tests/unit/agents/definitions/async-resolver.test.ts index 157bf0fa..fa7eeee9 100644 --- a/tests/unit/agents/definitions/async-resolver.test.ts +++ b/tests/unit/agents/definitions/async-resolver.test.ts @@ -2,7 +2,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import { clearDefinitionCache, invalidateDefinitionCache, - loadAgentDefinition, + loadBuiltinDefinition, resolveAgentDefinition, resolveAllAgentDefinitions, resolveKnownAgentTypes, @@ -51,7 +51,7 @@ describe('resolveAgentDefinition', () => { it('returns from in-memory cache when already loaded', async () => { // Prime the cache via the sync loader - const fromYaml = loadAgentDefinition('implementation'); + const fromYaml = loadBuiltinDefinition('implementation'); const { getAgentDefinition } = await getDbMocks(); // resolveAgentDefinition should return the cached value without hitting DB @@ -62,7 +62,7 @@ describe('resolveAgentDefinition', () => { it('fetches from DB when cache is empty and DB has the definition', async () => { const { getAgentDefinition } = await getDbMocks(); - const dbDef = loadAgentDefinition('planning'); + const dbDef = loadBuiltinDefinition('planning'); clearDefinitionCache(); getAgentDefinition.mockResolvedValue(dbDef); @@ -77,7 +77,7 @@ describe('resolveAgentDefinition', () => { getAgentDefinition.mockResolvedValue(null); const result = await resolveAgentDefinition('splitting'); - const expected = loadAgentDefinition('splitting'); + const expected = loadBuiltinDefinition('splitting'); expect(result).toEqual(expected); }); @@ -86,13 +86,13 @@ describe('resolveAgentDefinition', () => { getAgentDefinition.mockRejectedValue(new Error('DB connection failed')); const result = await resolveAgentDefinition('review'); - const expected = loadAgentDefinition('review'); + const expected = loadBuiltinDefinition('review'); 
expect(result).toEqual(expected); }); it('caches DB result so subsequent calls skip DB', async () => { const { getAgentDefinition } = await getDbMocks(); - const dbDef = loadAgentDefinition('debug'); + const dbDef = loadBuiltinDefinition('debug'); clearDefinitionCache(); getAgentDefinition.mockResolvedValue(dbDef); @@ -127,7 +127,7 @@ describe('resolveAllAgentDefinitions', () => { it('prefers DB definitions over YAML when present in DB', async () => { const { listAgentDefinitions } = await getDbMocks(); - const dbDef = loadAgentDefinition('implementation'); + const dbDef = loadBuiltinDefinition('implementation'); clearDefinitionCache(); // Simulate DB having only "implementation" @@ -172,7 +172,7 @@ describe('resolveKnownAgentTypes', () => { it('merges DB-only types with YAML types', async () => { const { listAgentDefinitions } = await getDbMocks(); - const customDef = loadAgentDefinition('implementation'); + const customDef = loadBuiltinDefinition('implementation'); clearDefinitionCache(); listAgentDefinitions.mockResolvedValue([ @@ -213,8 +213,8 @@ describe('invalidateDefinitionCache', () => { it('clears the in-memory cache so next resolve hits DB', async () => { const { getAgentDefinition } = await getDbMocks(); - const dbDef = loadAgentDefinition('planning'); - // Clear cache after priming via loadAgentDefinition so resolveAgentDefinition hits DB + const dbDef = loadBuiltinDefinition('planning'); + // Clear cache after priming via loadBuiltinDefinition so resolveAgentDefinition hits DB clearDefinitionCache(); getAgentDefinition.mockResolvedValue(dbDef); @@ -229,10 +229,10 @@ describe('invalidateDefinitionCache', () => { }); it('behaves identically to clearDefinitionCache for the sync path', () => { - loadAgentDefinition('splitting'); // prime cache + loadBuiltinDefinition('splitting'); // prime cache invalidateDefinitionCache(); // Sync load still works (reads fresh from YAML) - expect(() => loadAgentDefinition('splitting')).not.toThrow(); + expect(() => 
loadBuiltinDefinition('splitting')).not.toThrow(); }); }); diff --git a/tests/unit/agents/definitions/loader.test.ts b/tests/unit/agents/definitions/loader.test.ts index 3a2e9a3a..fb4631ed 100644 --- a/tests/unit/agents/definitions/loader.test.ts +++ b/tests/unit/agents/definitions/loader.test.ts @@ -5,11 +5,10 @@ import { } from '../../../../src/agents/capabilities/resolver.js'; import { clearDefinitionCache, - getKnownAgentTypes, + getBuiltinAgentTypes, isBuiltinAgentType, isPMFocusedAgent, - loadAgentDefinition, - loadAllAgentDefinitions, + loadBuiltinDefinition, } from '../../../../src/agents/definitions/loader.js'; import { CONTEXT_STEP_REGISTRY } from '../../../../src/agents/definitions/strategies.js'; import { getAgentCapabilities } from '../../../../src/agents/shared/capabilities.js'; @@ -34,9 +33,9 @@ describe('YAML agent definitions loader', () => { clearDefinitionCache(); }); - describe('getKnownAgentTypes', () => { + describe('getBuiltinAgentTypes', () => { it('discovers all 12 agent types from YAML files', () => { - const types = getKnownAgentTypes(); + const types = getBuiltinAgentTypes(); expect(types).toEqual(ALL_AGENT_TYPES); }); }); @@ -54,46 +53,38 @@ describe('YAML agent definitions loader', () => { }); }); - describe('loadAgentDefinition', () => { + describe('loadBuiltinDefinition', () => { it('loads and parses each agent definition without error', () => { for (const agentType of ALL_AGENT_TYPES) { - expect(() => loadAgentDefinition(agentType)).not.toThrow(); + expect(() => loadBuiltinDefinition(agentType)).not.toThrow(); } }); it('throws for unknown agent type', () => { - expect(() => loadAgentDefinition('nonexistent-agent')).toThrow('Agent definition not found'); + expect(() => loadBuiltinDefinition('nonexistent-agent')).toThrow( + 'Agent definition not found', + ); }); it('caches parsed definitions', () => { - const first = loadAgentDefinition('implementation'); - const second = loadAgentDefinition('implementation'); + const first = 
loadBuiltinDefinition('implementation'); + const second = loadBuiltinDefinition('implementation'); expect(first).toBe(second); }); it('returns fresh results after cache clear', () => { - const first = loadAgentDefinition('implementation'); + const first = loadBuiltinDefinition('implementation'); clearDefinitionCache(); - const second = loadAgentDefinition('implementation'); + const second = loadBuiltinDefinition('implementation'); expect(first).not.toBe(second); expect(first).toEqual(second); }); }); - describe('loadAllAgentDefinitions', () => { - it('returns a map with all 12 agent types', () => { - const all = loadAllAgentDefinitions(); - expect(all.size).toBe(ALL_AGENT_TYPES.length); - for (const agentType of ALL_AGENT_TYPES) { - expect(all.has(agentType)).toBe(true); - } - }); - }); - describe('strategy references resolve correctly', () => { it('all agents have valid capabilities', () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); expect(Array.isArray(def.capabilities.required)).toBe(true); expect(Array.isArray(def.capabilities.optional)).toBe(true); expect(def.capabilities.required.length).toBeGreaterThan(0); @@ -103,7 +94,7 @@ describe('YAML agent definitions loader', () => { it('agents with fs or shell capabilities derive to non-empty SDK tools', () => { // Only agents with fs:* or shell:exec capabilities need SDK tools. 
for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); const allCaps = [...def.capabilities.required, ...def.capabilities.optional]; // Check if agent has any capabilities that provide SDK tools @@ -123,7 +114,7 @@ describe('YAML agent definitions loader', () => { it('all trigger contextPipeline step references exist in CONTEXT_STEP_REGISTRY', () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); for (const trigger of def.triggers ?? []) { for (const step of trigger.contextPipeline ?? []) { expect( @@ -137,7 +128,7 @@ describe('YAML agent definitions loader', () => { it('all agents have prompts.taskPrompt defined', () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); expect( typeof def.prompts?.taskPrompt === 'string' && def.prompts.taskPrompt.length > 0, `${agentType}: prompts.taskPrompt is missing or empty`, @@ -148,20 +139,20 @@ describe('YAML agent definitions loader', () => { describe('definition content spot checks', () => { it('implementation has requiresPR flag in hooks.finish.scm', () => { - const def = loadAgentDefinition('implementation'); + const def = loadBuiltinDefinition('implementation'); expect(def.hooks?.finish?.scm?.requiresPR).toBe(true); }); it('non-implementation agents do not have hooks.finish.scm.requiresPR', () => { for (const agentType of ALL_AGENT_TYPES.filter((t) => t !== 'implementation')) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); expect(def.hooks?.finish?.scm?.requiresPR).toBeUndefined(); } }); it('work-item agents have triggers with standard context pipeline', () => { // implementation, splitting, planning triggers include workItem context - const def = loadAgentDefinition('implementation'); + const def = 
loadBuiltinDefinition('implementation'); const statusChangedTrigger = def.triggers.find((t) => t.event === 'pm:status-changed'); expect(statusChangedTrigger?.contextPipeline).toEqual([ 'directoryListing', @@ -173,19 +164,19 @@ describe('YAML agent definitions loader', () => { }); it('review agent triggers use PR context pipeline', () => { - const def = loadAgentDefinition('review'); + const def = loadBuiltinDefinition('review'); const ciPassedTrigger = def.triggers.find((t) => t.event === 'scm:check-suite-success'); expect(ciPassedTrigger?.contextPipeline).toEqual(['prContext', 'contextFiles', 'squint']); }); it('planning agent does not have pm:comment-mention trigger (routed to respond-to-planning-comment)', () => { - const def = loadAgentDefinition('planning'); + const def = loadBuiltinDefinition('planning'); const commentMentionTrigger = def.triggers.find((t) => t.event === 'pm:comment-mention'); expect(commentMentionTrigger).toBeUndefined(); }); it('review agent does not have lifecycle triggers (scm:pr-ready-to-merge, scm:pr-merged)', () => { - const def = loadAgentDefinition('review'); + const def = loadBuiltinDefinition('review'); const prReadyTrigger = def.triggers.find((t) => t.event === 'scm:pr-ready-to-merge'); const prMergedTrigger = def.triggers.find((t) => t.event === 'scm:pr-merged'); expect(prReadyTrigger).toBeUndefined(); @@ -193,7 +184,7 @@ describe('YAML agent definitions loader', () => { }); it('respond-to-ci trigger uses combined PR + work-item pipeline', () => { - const def = loadAgentDefinition('respond-to-ci'); + const def = loadBuiltinDefinition('respond-to-ci'); const ciFailureTrigger = def.triggers.find((t) => t.event === 'scm:check-suite-failure'); expect(ciFailureTrigger?.contextPipeline).toEqual([ 'prContext', @@ -205,7 +196,7 @@ describe('YAML agent definitions loader', () => { }); it('PR comment agents have triggers with conversation pipeline', () => { - const def = loadAgentDefinition('respond-to-pr-comment'); + const def = 
loadBuiltinDefinition('respond-to-pr-comment'); const prCommentTrigger = def.triggers.find((t) => t.event === 'scm:pr-comment-mention'); expect(prCommentTrigger?.contextPipeline).toEqual([ 'prContext', @@ -217,13 +208,13 @@ describe('YAML agent definitions loader', () => { }); it('planning has read-only capabilities (no fs:write)', () => { - const def = loadAgentDefinition('planning'); + const def = loadBuiltinDefinition('planning'); expect(def.capabilities.required).toContain('fs:read'); expect(def.capabilities.required).not.toContain('fs:write'); }); it('implementation has trailing hooks with all flags', () => { - const def = loadAgentDefinition('implementation'); + const def = loadBuiltinDefinition('implementation'); expect(def.hooks?.trailing).toEqual({ scm: { gitStatus: true, prStatus: true }, builtin: { diagnostics: true, todoProgress: true, reminder: true }, @@ -231,37 +222,37 @@ describe('YAML agent definitions loader', () => { }); it('respond-to-review has diagnostics-only trailing hooks', () => { - const def = loadAgentDefinition('respond-to-review'); + const def = loadBuiltinDefinition('respond-to-review'); expect(def.hooks?.trailing).toEqual({ builtin: { diagnostics: true }, }); }); it('respond-to-ci has diagnostics-only trailing hooks', () => { - const def = loadAgentDefinition('respond-to-ci'); + const def = loadBuiltinDefinition('respond-to-ci'); expect(def.hooks?.trailing).toEqual({ builtin: { diagnostics: true }, }); }); it('splitting has no hooks', () => { - const def = loadAgentDefinition('splitting'); + const def = loadBuiltinDefinition('splitting'); expect(def.hooks).toBeUndefined(); }); it('respond-to-review includes review comment gadget options', () => { - const def = loadAgentDefinition('respond-to-review'); + const def = loadBuiltinDefinition('respond-to-review'); expect(def.strategies.gadgetOptions).toEqual({ includeReviewComments: true }); }); it('respond-to-pr-comment includes review comment gadget options', () => { - const def = 
loadAgentDefinition('respond-to-pr-comment'); + const def = loadBuiltinDefinition('respond-to-pr-comment'); expect(def.strategies.gadgetOptions).toEqual({ includeReviewComments: true }); }); it('all agents have non-empty identity fields', () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); expect(def.identity.emoji.length).toBeGreaterThan(0); expect(def.identity.label.length).toBeGreaterThan(0); expect(def.identity.roleHint.length).toBeGreaterThan(0); @@ -271,13 +262,13 @@ describe('YAML agent definitions loader', () => { it('all agents have non-empty hints', () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); expect(def.hint.length).toBeGreaterThan(0); } }); it('backlog-manager has pm:status-changed, scm:pr-merged, and internal:auto-chain triggers', () => { - const def = loadAgentDefinition('backlog-manager'); + const def = loadBuiltinDefinition('backlog-manager'); const statusChangedTrigger = def.triggers.find((t) => t.event === 'pm:status-changed'); const prMergedTrigger = def.triggers.find((t) => t.event === 'scm:pr-merged'); const autoChainTrigger = def.triggers.find((t) => t.event === 'internal:auto-chain'); @@ -287,7 +278,7 @@ describe('YAML agent definitions loader', () => { }); it('backlog-manager integration triggers are defaultEnabled: false (opt-in)', () => { - const def = loadAgentDefinition('backlog-manager'); + const def = loadBuiltinDefinition('backlog-manager'); const integrationTriggers = def.triggers.filter((t) => !t.event.startsWith('internal:')); for (const trigger of integrationTriggers) { expect(trigger.defaultEnabled).toBe(false); @@ -295,13 +286,13 @@ describe('YAML agent definitions loader', () => { }); it('backlog-manager internal:auto-chain trigger is defaultEnabled: false (all triggers off by default)', () => { - const def = 
loadAgentDefinition('backlog-manager'); + const def = loadBuiltinDefinition('backlog-manager'); const autoChainTrigger = def.triggers.find((t) => t.event === 'internal:auto-chain'); expect(autoChainTrigger?.defaultEnabled).toBe(false); }); it('backlog-manager requires only pm integration', () => { - const def = loadAgentDefinition('backlog-manager'); + const def = loadBuiltinDefinition('backlog-manager'); expect(def.integrations?.required).toContain('pm'); expect(def.integrations?.optional ?? []).not.toContain('scm'); }); @@ -309,7 +300,7 @@ describe('YAML agent definitions loader', () => { describe('roundtrip: YAML definition → profile properties', () => { it('implementation agent has full capabilities and stop hooks', async () => { - const def = loadAgentDefinition('implementation'); + const def = loadBuiltinDefinition('implementation'); const caps = await getAgentCapabilities('implementation'); expect(caps.canEditFiles).toBe(true); @@ -321,7 +312,7 @@ describe('YAML agent definitions loader', () => { }); it('review agent is read-only', async () => { - const def = loadAgentDefinition('review'); + const def = loadBuiltinDefinition('review'); const caps = await getAgentCapabilities('review'); expect(caps.canEditFiles).toBe(false); @@ -331,7 +322,7 @@ describe('YAML agent definitions loader', () => { }); it('respond-to-ci agent requires scm integration', async () => { - const def = loadAgentDefinition('respond-to-ci'); + const def = loadBuiltinDefinition('respond-to-ci'); const caps = await getAgentCapabilities('respond-to-ci'); expect(caps.canEditFiles).toBe(true); @@ -340,7 +331,7 @@ describe('YAML agent definitions loader', () => { it('capabilities from getAgentCapabilities are derived correctly for all agents', async () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); const caps = await getAgentCapabilities(agentType); const allCaps = [...def.capabilities.required, 
...def.capabilities.optional]; @@ -408,14 +399,14 @@ describe('YAML agent definitions loader', () => { describe('integration requirements (derived from capabilities)', () => { it('all agents have valid capabilities with required array', () => { for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); expect(Array.isArray(def.capabilities.required)).toBe(true); expect(Array.isArray(def.capabilities.optional)).toBe(true); } }); it('implementation agent requires scm and pm (derived from capabilities)', () => { - const def = loadAgentDefinition('implementation'); + const def = loadBuiltinDefinition('implementation'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); // Order may vary - use set comparison expect(new Set(integrations.required)).toEqual(new Set(['scm', 'pm'])); @@ -423,56 +414,56 @@ describe('YAML agent definitions loader', () => { }); it('splitting agent requires pm only', () => { - const def = loadAgentDefinition('splitting'); + const def = loadBuiltinDefinition('splitting'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['pm']); expect(integrations.optional).toEqual([]); }); it('planning agent requires pm only', () => { - const def = loadAgentDefinition('planning'); + const def = loadBuiltinDefinition('planning'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['pm']); expect(integrations.optional).toEqual([]); }); it('review agent requires scm, pm is optional', () => { - const def = loadAgentDefinition('review'); + const def = loadBuiltinDefinition('review'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['scm']); expect(integrations.optional).toEqual(['pm']); }); 
it('respond-to-review agent requires scm, pm is optional', () => { - const def = loadAgentDefinition('respond-to-review'); + const def = loadBuiltinDefinition('respond-to-review'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['scm']); expect(integrations.optional).toEqual(['pm']); }); it('respond-to-ci agent requires scm, pm is optional', () => { - const def = loadAgentDefinition('respond-to-ci'); + const def = loadBuiltinDefinition('respond-to-ci'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['scm']); expect(integrations.optional).toEqual(['pm']); }); it('respond-to-pr-comment agent requires scm, pm is optional', () => { - const def = loadAgentDefinition('respond-to-pr-comment'); + const def = loadBuiltinDefinition('respond-to-pr-comment'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['scm']); expect(integrations.optional).toEqual(['pm']); }); it('respond-to-planning-comment agent requires pm only', () => { - const def = loadAgentDefinition('respond-to-planning-comment'); + const def = loadBuiltinDefinition('respond-to-planning-comment'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['pm']); expect(integrations.optional).toEqual([]); }); it('debug agent requires pm only', () => { - const def = loadAgentDefinition('debug'); + const def = loadBuiltinDefinition('debug'); const integrations = deriveIntegrations(def.capabilities.required, def.capabilities.optional); expect(integrations.required).toEqual(['pm']); expect(integrations.optional).toEqual([]); @@ -481,7 +472,7 @@ describe('YAML agent definitions loader', () => { it('all derived integration categories are valid', () => { const validCategories = ['pm', 'scm', 
'email', 'alerting']; for (const agentType of ALL_AGENT_TYPES) { - const def = loadAgentDefinition(agentType); + const def = loadBuiltinDefinition(agentType); const integrations = deriveIntegrations( def.capabilities.required, def.capabilities.optional, diff --git a/tests/unit/agents/shared/modelResolution.test.ts b/tests/unit/agents/shared/modelResolution.test.ts index 88474f6b..4dce9605 100644 --- a/tests/unit/agents/shared/modelResolution.test.ts +++ b/tests/unit/agents/shared/modelResolution.test.ts @@ -44,7 +44,7 @@ vi.mock('../../../../src/agents/definitions/loader.js', () => ({ 'respond-to-planning-comment', 'debug', ]), - getKnownAgentTypes: vi.fn().mockReturnValue([]), + getBuiltinAgentTypes: vi.fn().mockReturnValue([]), })); // Also mock the index re-export @@ -63,7 +63,7 @@ vi.mock('../../../../src/agents/definitions/index.js', () => ({ 'respond-to-planning-comment', 'debug', ]), - getKnownAgentTypes: vi.fn().mockReturnValue([]), + getBuiltinAgentTypes: vi.fn().mockReturnValue([]), })); // Mock getAgentConfigPrompts (project-level prompt lookup) diff --git a/tests/unit/api/routers/agentConfigs.test.ts b/tests/unit/api/routers/agentConfigs.test.ts index 561f8a6e..92aecc4a 100644 --- a/tests/unit/api/routers/agentConfigs.test.ts +++ b/tests/unit/api/routers/agentConfigs.test.ts @@ -33,7 +33,7 @@ const { mockLoadPartials: vi.fn(), mockResolveAgentDefinition: vi.fn(), mockGetRawTemplate: vi.fn(), - mockGetDefaultTaskPrompt: vi.fn().mockReturnValue(null), + mockGetDefaultTaskPrompt: vi.fn().mockResolvedValue(null), })); vi.mock('../../../../src/db/repositories/settingsRepository.js', () => ({ @@ -612,7 +612,7 @@ describe('agentConfigsRouter', () => { }, }); mockGetRawTemplate.mockReturnValue('raw disk template content'); - mockGetDefaultTaskPrompt.mockReturnValue('yaml default task prompt'); + mockGetDefaultTaskPrompt.mockResolvedValue('yaml default task prompt'); const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); const result = 
await caller.getPrompts({ projectId: 'proj-1', agentType: 'implementation' }); diff --git a/tests/unit/api/routers/agentDefinitions.test.ts b/tests/unit/api/routers/agentDefinitions.test.ts index 55c1c23c..b04a2655 100644 --- a/tests/unit/api/routers/agentDefinitions.test.ts +++ b/tests/unit/api/routers/agentDefinitions.test.ts @@ -8,10 +8,10 @@ import { createCallerFor, expectTRPCError } from '../../../helpers/trpcTestHarne // --------------------------------------------------------------------------- const { - mockGetKnownAgentTypes, + mockGetBuiltinAgentTypes, mockIsBuiltinAgentType, mockInvalidateDefinitionCache, - mockLoadAgentDefinition, + mockLoadBuiltinDefinition, mockResolveAgentDefinition, mockResolveKnownAgentTypes, mockListAgentDefinitions, @@ -22,10 +22,10 @@ const { mockValidateTemplate, mockLoadPartials, } = vi.hoisted(() => ({ - mockGetKnownAgentTypes: vi.fn<() => string[]>(), + mockGetBuiltinAgentTypes: vi.fn<() => string[]>(), mockIsBuiltinAgentType: vi.fn<(agentType: string) => boolean>(), mockInvalidateDefinitionCache: vi.fn(), - mockLoadAgentDefinition: vi.fn<(agentType: string) => AgentDefinition>(), + mockLoadBuiltinDefinition: vi.fn<(agentType: string) => AgentDefinition>(), mockResolveAgentDefinition: vi.fn<(agentType: string) => Promise>(), mockResolveKnownAgentTypes: vi.fn<() => Promise>(), mockListAgentDefinitions: vi.fn(), @@ -38,10 +38,10 @@ const { })); vi.mock('../../../../src/agents/definitions/loader.js', () => ({ - getKnownAgentTypes: mockGetKnownAgentTypes, + getBuiltinAgentTypes: mockGetBuiltinAgentTypes, isBuiltinAgentType: mockIsBuiltinAgentType, invalidateDefinitionCache: mockInvalidateDefinitionCache, - loadAgentDefinition: mockLoadAgentDefinition, + loadBuiltinDefinition: mockLoadBuiltinDefinition, resolveAgentDefinition: mockResolveAgentDefinition, resolveKnownAgentTypes: mockResolveKnownAgentTypes, })); @@ -111,7 +111,7 @@ function createMockDefinition(overrides?: Partial): AgentDefini describe('agentDefinitionsRouter', 
() => { beforeEach(() => { - mockGetKnownAgentTypes.mockReturnValue(['implementation', 'review']); + mockGetBuiltinAgentTypes.mockReturnValue(['implementation', 'review']); mockIsBuiltinAgentType.mockImplementation((agentType: string) => ['implementation', 'review'].includes(agentType), ); @@ -129,7 +129,7 @@ describe('agentDefinitionsRouter', () => { { agentType: 'implementation', definition: dbDef, isBuiltin: true }, ]); const yamlDef = createMockDefinition({ hint: 'from yaml' }); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: mockSuperAdmin.orgId }); const result = await caller.list(); @@ -152,7 +152,7 @@ describe('agentDefinitionsRouter', () => { it('falls back to YAML only when DB fails', async () => { mockListAgentDefinitions.mockRejectedValue(new Error('DB down')); const yamlDef = createMockDefinition(); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: mockSuperAdmin.orgId }); const result = await caller.list(); @@ -169,7 +169,7 @@ describe('agentDefinitionsRouter', () => { { agentType: 'custom-agent', definition: customDef, isBuiltin: false }, ]); const yamlDef = createMockDefinition(); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: mockSuperAdmin.orgId }); const result = await caller.list(); @@ -184,7 +184,7 @@ describe('agentDefinitionsRouter', () => { it('does not call listAgentDefinitions twice (no redundant DB query)', async () => { mockListAgentDefinitions.mockResolvedValue([]); - mockLoadAgentDefinition.mockReturnValue(createMockDefinition()); + mockLoadBuiltinDefinition.mockReturnValue(createMockDefinition()); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: 
mockSuperAdmin.orgId }); await caller.list(); @@ -349,7 +349,7 @@ describe('agentDefinitionsRouter', () => { describe('delete', () => { it('deletes a non-builtin definition (superadmin)', async () => { mockGetAgentDefinition.mockResolvedValue(createMockDefinition()); - mockGetKnownAgentTypes.mockReturnValue(['implementation', 'review']); // custom-agent is NOT in this list + mockGetBuiltinAgentTypes.mockReturnValue(['implementation', 'review']); // custom-agent is NOT in this list mockDeleteAgentDefinition.mockResolvedValue(undefined); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: mockSuperAdmin.orgId }); @@ -397,14 +397,14 @@ describe('agentDefinitionsRouter', () => { describe('reset', () => { it('resets a builtin definition to YAML default (superadmin)', async () => { const yamlDef = createMockDefinition({ hint: 'yaml default' }); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); mockUpsertAgentDefinition.mockResolvedValue(undefined); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: mockSuperAdmin.orgId }); const result = await caller.reset({ agentType: 'implementation' }); expect(result).toEqual({ agentType: 'implementation' }); - expect(mockLoadAgentDefinition).toHaveBeenCalledWith('implementation'); + expect(mockLoadBuiltinDefinition).toHaveBeenCalledWith('implementation'); expect(mockUpsertAgentDefinition).toHaveBeenCalledWith('implementation', yamlDef, true); // Cache should be invalidated twice (before YAML reload and after upsert) expect(mockInvalidateDefinitionCache).toHaveBeenCalledTimes(2); @@ -564,7 +564,7 @@ describe('agentDefinitionsRouter', () => { }); mockResolveAgentDefinition.mockResolvedValue(current); const yamlDef = createMockDefinition({ prompts: { taskPrompt: 'yaml task' } }); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); mockGetRawTemplate.mockReturnValue('## System prompt from .eta'); 
mockUpsertAgentDefinition.mockResolvedValue(undefined); @@ -590,7 +590,7 @@ describe('agentDefinitionsRouter', () => { const yamlDef = createMockDefinition({ prompts: { taskPrompt: 'yaml task', systemPrompt: 'yaml system' }, }); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); mockUpsertAgentDefinition.mockResolvedValue(undefined); const caller = createCaller({ user: mockSuperAdmin, effectiveOrgId: mockSuperAdmin.orgId }); @@ -610,7 +610,7 @@ describe('agentDefinitionsRouter', () => { const current = createMockDefinition(); mockResolveAgentDefinition.mockResolvedValue(current); const yamlDef = createMockDefinition({ prompts: { taskPrompt: 'yaml task' } }); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); mockGetRawTemplate.mockImplementation(() => { throw new Error('no .eta file'); }); @@ -640,7 +640,7 @@ describe('agentDefinitionsRouter', () => { it('throws NOT_FOUND when YAML default does not exist', async () => { const current = createMockDefinition(); mockResolveAgentDefinition.mockResolvedValue(current); - mockLoadAgentDefinition.mockImplementation(() => { + mockLoadBuiltinDefinition.mockImplementation(() => { throw new Error('yaml not found'); }); @@ -667,7 +667,7 @@ describe('agentDefinitionsRouter', () => { const current = createMockDefinition(); mockResolveAgentDefinition.mockResolvedValue(current); const yamlDef = createMockDefinition({ prompts: { taskPrompt: 'yaml task' } }); - mockLoadAgentDefinition.mockReturnValue(yamlDef); + mockLoadBuiltinDefinition.mockReturnValue(yamlDef); mockGetRawTemplate.mockReturnValue('system prompt'); mockUpsertAgentDefinition.mockResolvedValue(undefined); diff --git a/tests/unit/api/routers/agentTriggerConfigs.getProjectTriggersView.test.ts b/tests/unit/api/routers/agentTriggerConfigs.getProjectTriggersView.test.ts index 240086bd..d2ab21e2 100644 --- 
a/tests/unit/api/routers/agentTriggerConfigs.getProjectTriggersView.test.ts +++ b/tests/unit/api/routers/agentTriggerConfigs.getProjectTriggersView.test.ts @@ -10,16 +10,16 @@ const { mockListAgentDefinitions, mockGetTriggerConfigsByProject, mockListProjectIntegrations, - mockGetKnownAgentTypes, - mockLoadAgentDefinition, + mockGetBuiltinAgentTypes, + mockLoadBuiltinDefinition, mockListAgentConfigs, mockVerifyProjectOrgAccess, } = vi.hoisted(() => ({ mockListAgentDefinitions: vi.fn(), mockGetTriggerConfigsByProject: vi.fn(), mockListProjectIntegrations: vi.fn(), - mockGetKnownAgentTypes: vi.fn(), - mockLoadAgentDefinition: vi.fn(), + mockGetBuiltinAgentTypes: vi.fn(), + mockLoadBuiltinDefinition: vi.fn(), mockListAgentConfigs: vi.fn(), mockVerifyProjectOrgAccess: vi.fn(), })); @@ -48,8 +48,8 @@ vi.mock('../../../../src/db/repositories/agentConfigsRepository.js', () => ({ })); vi.mock('../../../../src/agents/definitions/loader.js', () => ({ - getKnownAgentTypes: mockGetKnownAgentTypes, - loadAgentDefinition: mockLoadAgentDefinition, + getBuiltinAgentTypes: mockGetBuiltinAgentTypes, + loadBuiltinDefinition: mockLoadBuiltinDefinition, })); vi.mock('../../../../src/api/routers/_shared/projectAccess.js', () => ({ @@ -97,8 +97,8 @@ describe('agentTriggerConfigsRouter — getProjectTriggersView', () => { mockGetTriggerConfigsByProject.mockResolvedValue([]); mockListProjectIntegrations.mockResolvedValue([]); mockListAgentDefinitions.mockResolvedValue([]); - mockGetKnownAgentTypes.mockReturnValue([]); - mockLoadAgentDefinition.mockReturnValue(makeAgentDefinition()); + mockGetBuiltinAgentTypes.mockReturnValue([]); + mockLoadBuiltinDefinition.mockReturnValue(makeAgentDefinition()); // Default: no agent configs (all agents are unconfigured / available) mockListAgentConfigs.mockResolvedValue([]); }); @@ -286,8 +286,8 @@ describe('agentTriggerConfigsRouter — getProjectTriggersView', () => { it('is resilient to DB failure when loading agent definitions', async () => { 
mockListAgentDefinitions.mockRejectedValue(new Error('DB connection failed')); // Falls back to YAML — need some types for that - mockGetKnownAgentTypes.mockReturnValue(['implementation']); - mockLoadAgentDefinition.mockReturnValue(makeAgentDefinition()); + mockGetBuiltinAgentTypes.mockReturnValue(['implementation']); + mockLoadBuiltinDefinition.mockReturnValue(makeAgentDefinition()); mockListAgentConfigs.mockResolvedValue([{ agentType: 'implementation', id: 1 }]); const caller = createCaller(mockCtx); @@ -302,7 +302,7 @@ describe('agentTriggerConfigsRouter — getProjectTriggersView', () => { const definition = makeAgentDefinition(); mockListAgentDefinitions.mockResolvedValue([{ agentType: 'implementation', definition }]); // YAML also has 'implementation' - mockGetKnownAgentTypes.mockReturnValue(['implementation']); + mockGetBuiltinAgentTypes.mockReturnValue(['implementation']); mockListAgentConfigs.mockResolvedValue([{ agentType: 'implementation', id: 1 }]); const caller = createCaller(mockCtx); @@ -314,8 +314,8 @@ describe('agentTriggerConfigsRouter — getProjectTriggersView', () => { it('enabled agents appear in enabledAgents; unconfigured appear in availableAgents', async () => { mockListAgentDefinitions.mockResolvedValue([]); // no DB definitions - mockGetKnownAgentTypes.mockReturnValue(['splitting', 'planning']); - mockLoadAgentDefinition.mockReturnValue(makeAgentDefinition()); + mockGetBuiltinAgentTypes.mockReturnValue(['splitting', 'planning']); + mockLoadBuiltinDefinition.mockReturnValue(makeAgentDefinition()); // Only 'splitting' is enabled mockListAgentConfigs.mockResolvedValue([{ agentType: 'splitting', id: 1 }]); @@ -329,8 +329,8 @@ describe('agentTriggerConfigsRouter — getProjectTriggersView', () => { }); it('handles YAML load failure gracefully (skips that agent)', async () => { - mockGetKnownAgentTypes.mockReturnValue(['implementation', 'failing-agent']); - mockLoadAgentDefinition + mockGetBuiltinAgentTypes.mockReturnValue(['implementation', 
'failing-agent']); + mockLoadBuiltinDefinition .mockReturnValueOnce(makeAgentDefinition()) .mockImplementationOnce(() => { throw new Error('YAML parse error'); diff --git a/tests/unit/backends/llmist.test.ts b/tests/unit/backends/llmist.test.ts index 1e2ecc08..d1f88167 100644 --- a/tests/unit/backends/llmist.test.ts +++ b/tests/unit/backends/llmist.test.ts @@ -15,7 +15,7 @@ vi.mock('../../../src/agents/capabilities/index.js', () => ({ // backends/llmist → definitions → strategies → gadgets → pm/ → webhook-handler // → triggers/agent-execution → agents/registry → new LlmistEngine() (still loading) vi.mock('../../../src/agents/definitions/index.js', () => ({ - loadAgentDefinition: vi.fn(() => ({ engine: {} })), + loadBuiltinDefinition: vi.fn(() => ({ engine: {} })), resolveAgentDefinition: vi.fn(async () => ({ engine: {} })), })); From 42b1fd0c283a40cee392e1e5b1c627b79681c528 Mon Sep 17 00:00:00 2001 From: aaight Date: Sat, 4 Apr 2026 16:43:03 +0200 Subject: [PATCH 16/52] refactor(agents): remove LegacyCapabilities shim and migrate callers to capability strings (#1084) Co-authored-by: Cascade Bot --- src/agents/shared/builderFactory.ts | 4 +- src/agents/shared/capabilities.ts | 83 ------------------- tests/unit/agents/definitions/loader.test.ts | 43 ++++------ .../unit/agents/shared/builderFactory.test.ts | 22 ++--- 4 files changed, 25 insertions(+), 127 deletions(-) delete mode 100644 src/agents/shared/capabilities.ts diff --git a/src/agents/shared/builderFactory.ts b/src/agents/shared/builderFactory.ts index a214c39c..ddfb88a7 100644 --- a/src/agents/shared/builderFactory.ts +++ b/src/agents/shared/builderFactory.ts @@ -14,7 +14,7 @@ import { initSessionState, type SessionHooks, setReadOnlyFs } from '../../gadget import type { LLMCallLogger } from '../../utils/llmLogging.js'; import { resolveSquintDbPath } from '../../utils/squintDb.js'; import type { IProgressMonitor } from '../contracts/index.js'; -import { getAgentCapabilities } from '../shared/capabilities.js'; 
+import { getAgentCapabilities } from '../definitions/index.js'; import { type AccumulatedLlmCall, createObserverHooks } from '../utils/hooks.js'; import type { TrackingContext } from '../utils/tracking.js'; @@ -104,7 +104,7 @@ export async function createConfiguredBuilder(options: CreateBuilderOptions): Pr // Mark session as read-only if agent lacks fs:write capability const caps = await getAgentCapabilities(agentType); - if (caps.isReadOnly) { + if (![...caps.required, ...caps.optional].includes('fs:write')) { setReadOnlyFs(true); } } diff --git a/src/agents/shared/capabilities.ts b/src/agents/shared/capabilities.ts deleted file mode 100644 index f1cab599..00000000 --- a/src/agents/shared/capabilities.ts +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Agent Capabilities - * - * Re-exports capability types and functions from the new capability registry. - * - * This file is kept for backward compatibility. New code should import from: - * - '../capabilities/index.js' for full capability system - * - '../definitions/schema.js' for AgentCapabilities type - */ - -// Re-export capability functions -export { - buildGadgetsFromCapabilities, - CAPABILITIES, - CAPABILITY_REGISTRY, - deriveIntegrations, - deriveRequiredIntegrations, - filterToolManifests, - generateUnavailableCapabilitiesNote, - getCapabilitiesByIntegration, - getCapabilityIntegration, - getGadgetNamesFromCapabilities, - getSdkToolsFromCapabilities, - getUnavailableOptionalCapabilities, - isBuiltInCapability, - isValidCapability, - resolveEffectiveCapabilities, -} from '../capabilities/index.js'; -// Re-export capability types -export type { AgentCapabilities, Capability } from '../definitions/schema.js'; - -import { resolveAgentDefinition } from '../definitions/index.js'; - -/** - * Legacy interface for derived capability flags. - * Used by code that needs boolean capability checks. 
- */ -export interface LegacyCapabilities { - canEditFiles: boolean; - canCreatePR: boolean; - canUpdateChecklists: boolean; - isReadOnly: boolean; -} - -/** - * Get legacy capability flags for an agent type. - * - * Derives boolean capability flags from the new capability array format: - * - canEditFiles = has 'fs:write' - * - canCreatePR = has 'scm:pr' - * - canUpdateChecklists = has 'pm:checklist' - * - isReadOnly = does not have 'fs:write' - * - * For unknown agent types, returns full-access defaults to maintain - * backward compatibility. - */ -export async function getAgentCapabilities(agentType: string): Promise { - try { - const def = await resolveAgentDefinition(agentType); - const allCaps = [...def.capabilities.required, ...def.capabilities.optional]; - - return { - canEditFiles: allCaps.includes('fs:write'), - canCreatePR: allCaps.includes('scm:pr'), - canUpdateChecklists: allCaps.includes('pm:checklist'), - isReadOnly: !allCaps.includes('fs:write'), - }; - } catch (error) { - // Only fall back to full access for "agent not found" errors. - // Re-throw unexpected errors to avoid masking bugs with elevated privileges. - const message = error instanceof Error ? 
error.message : String(error); - if (message.includes('not found')) { - // Unknown agent type - return full-access defaults for backward compatibility - return { - canEditFiles: true, - canCreatePR: true, - canUpdateChecklists: true, - isReadOnly: false, - }; - } - throw error; - } -} diff --git a/tests/unit/agents/definitions/loader.test.ts b/tests/unit/agents/definitions/loader.test.ts index fb4631ed..c194c906 100644 --- a/tests/unit/agents/definitions/loader.test.ts +++ b/tests/unit/agents/definitions/loader.test.ts @@ -3,6 +3,7 @@ import { deriveIntegrations, getSdkToolsFromCapabilities, } from '../../../../src/agents/capabilities/resolver.js'; +import { getAgentCapabilities } from '../../../../src/agents/definitions/index.js'; import { clearDefinitionCache, getBuiltinAgentTypes, @@ -11,7 +12,6 @@ import { loadBuiltinDefinition, } from '../../../../src/agents/definitions/loader.js'; import { CONTEXT_STEP_REGISTRY } from '../../../../src/agents/definitions/strategies.js'; -import { getAgentCapabilities } from '../../../../src/agents/shared/capabilities.js'; const ALL_AGENT_TYPES = [ 'alerting', @@ -302,11 +302,11 @@ describe('YAML agent definitions loader', () => { it('implementation agent has full capabilities and stop hooks', async () => { const def = loadBuiltinDefinition('implementation'); const caps = await getAgentCapabilities('implementation'); + const allCaps = [...caps.required, ...caps.optional]; - expect(caps.canEditFiles).toBe(true); - expect(caps.canCreatePR).toBe(true); - expect(caps.canUpdateChecklists).toBe(true); - expect(caps.isReadOnly).toBe(false); + expect(allCaps.includes('fs:write')).toBe(true); + expect(allCaps.includes('scm:pr')).toBe(true); + expect(allCaps.includes('pm:checklist')).toBe(true); expect(def.hooks?.finish?.scm?.requiresPR).toBe(true); expect(def.integrations?.required).toContain('scm'); }); @@ -314,9 +314,9 @@ describe('YAML agent definitions loader', () => { it('review agent is read-only', async () => { const def = 
loadBuiltinDefinition('review'); const caps = await getAgentCapabilities('review'); + const allCaps = [...caps.required, ...caps.optional]; - expect(caps.canEditFiles).toBe(false); - expect(caps.isReadOnly).toBe(true); + expect(allCaps.includes('fs:write')).toBe(false); expect(def.hooks?.finish?.scm?.requiresReview).toBe(true); expect(def.integrations?.required).toContain('scm'); }); @@ -324,8 +324,9 @@ describe('YAML agent definitions loader', () => { it('respond-to-ci agent requires scm integration', async () => { const def = loadBuiltinDefinition('respond-to-ci'); const caps = await getAgentCapabilities('respond-to-ci'); + const allCaps = [...caps.required, ...caps.optional]; - expect(caps.canEditFiles).toBe(true); + expect(allCaps.includes('fs:write')).toBe(true); expect(def.integrations?.required).toContain('scm'); }); @@ -333,32 +334,18 @@ describe('YAML agent definitions loader', () => { for (const agentType of ALL_AGENT_TYPES) { const def = loadBuiltinDefinition(agentType); const caps = await getAgentCapabilities(agentType); - const allCaps = [...def.capabilities.required, ...def.capabilities.optional]; - - // canEditFiles = has fs:write - expect(caps.canEditFiles).toBe(allCaps.includes('fs:write')); - - // canCreatePR = has scm:pr - expect(caps.canCreatePR).toBe(allCaps.includes('scm:pr')); + const allCaps = [...caps.required, ...caps.optional]; + const defAllCaps = [...def.capabilities.required, ...def.capabilities.optional]; - // canUpdateChecklists = has pm:checklist - expect(caps.canUpdateChecklists).toBe(allCaps.includes('pm:checklist')); - - // isReadOnly = no fs:write - expect(caps.isReadOnly).toBe(!allCaps.includes('fs:write')); + // capabilities should match the definition + expect(allCaps).toEqual(defAllCaps); } }); }); describe('unknown agent type fallbacks', () => { - it('getAgentCapabilities returns full-access defaults for unknown type', async () => { - const caps = await getAgentCapabilities('nonexistent-agent-type'); - 
expect(caps).toEqual({ - canEditFiles: true, - canCreatePR: true, - canUpdateChecklists: true, - isReadOnly: false, - }); + it('getAgentCapabilities throws for unknown agent type', async () => { + await expect(getAgentCapabilities('nonexistent-agent-type')).rejects.toThrow(); }); }); diff --git a/tests/unit/agents/shared/builderFactory.test.ts b/tests/unit/agents/shared/builderFactory.test.ts index 91942653..07a122cb 100644 --- a/tests/unit/agents/shared/builderFactory.test.ts +++ b/tests/unit/agents/shared/builderFactory.test.ts @@ -29,12 +29,10 @@ vi.mock('../../../../src/gadgets/sessionState.js', async (importOriginal) => { }; }); -vi.mock('../../../../src/agents/shared/capabilities.js', () => ({ +vi.mock('../../../../src/agents/definitions/index.js', () => ({ getAgentCapabilities: vi.fn().mockResolvedValue({ - canEditFiles: true, - canCreatePR: true, - canUpdateChecklists: true, - isReadOnly: false, + required: ['fs:read', 'fs:write', 'shell:exec', 'session:ctrl'], + optional: [], }), })); @@ -80,11 +78,11 @@ vi.mock('llmist', () => ({ import { execSync } from 'node:child_process'; import { AgentBuilder, BudgetPricingUnavailableError } from 'llmist'; +import { getAgentCapabilities } from '../../../../src/agents/definitions/index.js'; import { createConfiguredBuilder, isSquintEnabled, } from '../../../../src/agents/shared/builderFactory.js'; -import { getAgentCapabilities } from '../../../../src/agents/shared/capabilities.js'; import { initSessionState, setReadOnlyFs } from '../../../../src/gadgets/sessionState.js'; import { resolveSquintDbPath } from '../../../../src/utils/squintDb.js'; @@ -322,10 +320,8 @@ describe('createConfiguredBuilder', () => { it('calls setReadOnlyFs(true) when agent is read-only', async () => { mockGetAgentCapabilities.mockResolvedValueOnce({ - canEditFiles: false, - canCreatePR: false, - canUpdateChecklists: false, - isReadOnly: true, + required: ['fs:read', 'session:ctrl'], + optional: [], }); const options = createBaseOptions({ 
agentType: 'review' }); await createConfiguredBuilder(options); @@ -334,10 +330,8 @@ describe('createConfiguredBuilder', () => { it('does not call setReadOnlyFs when agent has write access', async () => { mockGetAgentCapabilities.mockResolvedValueOnce({ - canEditFiles: true, - canCreatePR: true, - canUpdateChecklists: true, - isReadOnly: false, + required: ['fs:read', 'fs:write', 'shell:exec', 'session:ctrl'], + optional: [], }); const options = createBaseOptions(); await createConfiguredBuilder(options); From 884f8ee30fc1f9bc05ebfda89a216e8630b29c3b Mon Sep 17 00:00:00 2001 From: aaight Date: Sat, 4 Apr 2026 16:53:28 +0200 Subject: [PATCH 17/52] refactor(web): decompose project-agent-configs.tsx and integration-form.tsx (#1085) Co-authored-by: Cascade Bot --- tests/unit/web/agent-config-utils.test.ts | 164 ++++ .../projects/agent-config-detail.tsx | 414 ++++++++ .../components/projects/agent-config-list.tsx | 275 ++++++ .../components/projects/agent-config-types.ts | 167 ++++ .../components/projects/agent-config-utils.ts | 40 + .../projects/integration-alerting-tab.tsx | 198 ++++ .../components/projects/integration-form.tsx | 616 +----------- .../projects/integration-scm-tab.tsx | 434 +++++++++ .../projects/project-agent-configs.tsx | 889 +----------------- 9 files changed, 1709 insertions(+), 1488 deletions(-) create mode 100644 tests/unit/web/agent-config-utils.test.ts create mode 100644 web/src/components/projects/agent-config-detail.tsx create mode 100644 web/src/components/projects/agent-config-list.tsx create mode 100644 web/src/components/projects/agent-config-types.ts create mode 100644 web/src/components/projects/agent-config-utils.ts create mode 100644 web/src/components/projects/integration-alerting-tab.tsx create mode 100644 web/src/components/projects/integration-scm-tab.tsx diff --git a/tests/unit/web/agent-config-utils.test.ts b/tests/unit/web/agent-config-utils.test.ts new file mode 100644 index 00000000..138d3680 --- /dev/null +++ 
b/tests/unit/web/agent-config-utils.test.ts @@ -0,0 +1,164 @@ +import { describe, expect, it } from 'vitest'; +import type { ResolvedTrigger } from '../../../src/api/routers/_shared/triggerTypes.js'; +import { + countActiveTriggers, + engineHasCredentials, +} from '../../../web/src/components/projects/agent-config-utils.js'; + +// ============================================================================ +// engineHasCredentials +// ============================================================================ + +describe('engineHasCredentials', () => { + it('returns true for unknown engines (conservative assumption)', () => { + const keys = new Set(); + expect(engineHasCredentials('unknown-engine', keys)).toBe(true); + }); + + it('returns false for codex when no credential keys are configured', () => { + const keys = new Set(); + expect(engineHasCredentials('codex', keys)).toBe(false); + }); + + it('returns true for codex when OPENAI_API_KEY is configured', () => { + const keys = new Set(['OPENAI_API_KEY']); + expect(engineHasCredentials('codex', keys)).toBe(true); + }); + + it('returns true for codex when CODEX_AUTH_JSON is configured', () => { + const keys = new Set(['CODEX_AUTH_JSON']); + expect(engineHasCredentials('codex', keys)).toBe(true); + }); + + it('returns false for claude-code when no credential keys are configured', () => { + const keys = new Set(); + expect(engineHasCredentials('claude-code', keys)).toBe(false); + }); + + it('returns true for claude-code when ANTHROPIC_API_KEY is configured', () => { + const keys = new Set(['ANTHROPIC_API_KEY']); + expect(engineHasCredentials('claude-code', keys)).toBe(true); + }); + + it('returns true for claude-code when CLAUDE_CODE_OAUTH_TOKEN is configured', () => { + const keys = new Set(['CLAUDE_CODE_OAUTH_TOKEN']); + expect(engineHasCredentials('claude-code', keys)).toBe(true); + }); + + it('returns false for opencode when no credential keys are configured', () => { + const keys = new Set(); + 
expect(engineHasCredentials('opencode', keys)).toBe(false); + }); + + it('returns true for opencode when OPENAI_API_KEY is configured', () => { + const keys = new Set(['OPENAI_API_KEY']); + expect(engineHasCredentials('opencode', keys)).toBe(true); + }); + + it('returns true for opencode when OPENROUTER_API_KEY is configured', () => { + const keys = new Set(['OPENROUTER_API_KEY']); + expect(engineHasCredentials('opencode', keys)).toBe(true); + }); + + it('returns false for llmist when no credential keys are configured', () => { + const keys = new Set(); + expect(engineHasCredentials('llmist', keys)).toBe(false); + }); + + it('returns true for llmist when OPENROUTER_API_KEY is configured', () => { + const keys = new Set(['OPENROUTER_API_KEY']); + expect(engineHasCredentials('llmist', keys)).toBe(true); + }); + + it('ignores unrelated keys', () => { + const keys = new Set(['SOME_OTHER_KEY', 'UNRELATED_KEY']); + expect(engineHasCredentials('claude-code', keys)).toBe(false); + }); +}); + +// ============================================================================ +// countActiveTriggers +// ============================================================================ + +describe('countActiveTriggers', () => { + const integrations = { pm: 'trello', scm: 'github' }; + + function makeTrigger(event: string, enabled: boolean, providers?: string[]): ResolvedTrigger { + return { + event, + enabled, + providers: providers ?? 
[], + label: event, + parameters: [], + parameterValues: {}, + } as ResolvedTrigger; + } + + it('returns 0 when there are no triggers', () => { + expect(countActiveTriggers([], integrations)).toBe(0); + }); + + it('counts only enabled triggers', () => { + const triggers = [ + makeTrigger('pm:card-created', true), + makeTrigger('pm:card-moved', false), + makeTrigger('scm:pr-opened', true), + ]; + expect(countActiveTriggers(triggers, integrations)).toBe(2); + }); + + it('counts triggers without provider restrictions normally', () => { + const triggers = [ + makeTrigger('internal:run-complete', true), + makeTrigger('internal:task-failed', true), + ]; + expect(countActiveTriggers(triggers, integrations)).toBe(2); + }); + + it('filters out enabled triggers whose provider does not match active integration', () => { + const triggers = [ + // This trigger is enabled but requires 'jira' — active pm is 'trello' + makeTrigger('pm:issue-created', true, ['jira']), + // This trigger requires 'trello' — active pm is 'trello' + makeTrigger('pm:card-created', true, ['trello']), + ]; + expect(countActiveTriggers(triggers, integrations)).toBe(1); + }); + + it('includes enabled triggers whose provider matches active integration', () => { + const triggers = [ + makeTrigger('scm:pr-opened', true, ['github']), + makeTrigger('scm:pr-merged', true, ['github']), + ]; + expect(countActiveTriggers(triggers, integrations)).toBe(2); + }); + + it('returns 0 for all disabled triggers even if provider matches', () => { + const triggers = [ + makeTrigger('pm:card-created', false, ['trello']), + makeTrigger('scm:pr-opened', false, ['github']), + ]; + expect(countActiveTriggers(triggers, integrations)).toBe(0); + }); + + it('handles null integrations gracefully', () => { + const noIntegrations = { pm: null, scm: null }; + const triggers = [ + // provider restriction — active integration is null, so no match + makeTrigger('pm:card-created', true, ['trello']), + // no provider restriction — always 
included + makeTrigger('internal:run-complete', true), + ]; + expect(countActiveTriggers(triggers, noIntegrations)).toBe(1); + }); + + it('counts mixed enabled/disabled triggers with provider filtering', () => { + const triggers = [ + makeTrigger('pm:card-created', true, ['trello']), // enabled, provider matches + makeTrigger('pm:card-moved', false, ['trello']), // disabled, skipped + makeTrigger('pm:issue-created', true, ['jira']), // enabled, wrong provider + makeTrigger('internal:run-complete', true), // enabled, no restriction + ]; + expect(countActiveTriggers(triggers, integrations)).toBe(2); + }); +}); diff --git a/web/src/components/projects/agent-config-detail.tsx b/web/src/components/projects/agent-config-detail.tsx new file mode 100644 index 00000000..d5b837fc --- /dev/null +++ b/web/src/components/projects/agent-config-detail.tsx @@ -0,0 +1,414 @@ +/** + * Agent detail view components: DefinitionAgentSection and AgentDetailView. + * Renders the tabbed detail panel (Engine / Prompts / Triggers) for a single agent. 
+ */ +import { ArrowLeft } from 'lucide-react'; +import { useEffect, useMemo, useRef, useState } from 'react'; +import { EngineSettingsFields } from '@/components/settings/engine-settings-fields.js'; +import { ModelField } from '@/components/settings/model-field.js'; +import { + DefinitionTriggerToggles, + type ResolvedTrigger, +} from '@/components/shared/definition-trigger-toggles.js'; +import { Input } from '@/components/ui/input.js'; +import { Label } from '@/components/ui/label.js'; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select.js'; +import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs.js'; +import { AGENT_LABELS, CATEGORY_LABELS } from '@/lib/trigger-agent-mapping.js'; +import type { AgentDetailViewProps, DefinitionAgentSectionProps } from './agent-config-types.js'; +import { AgentPromptOverrides } from './agent-prompt-overrides.js'; + +// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: tabbed detail panel managing Engine/Prompts/Triggers tabs with per-tab state, mutations, and trigger category grouping +function DefinitionAgentSection({ + agentType, + projectId, + config, + triggers, + integrations, + engines, + isSaving, + onSaveConfig, + saveSuccessNonce, + onDeleteConfig, + onTriggerToggle, + onTriggerParamChange, + projectModel, + projectEngine, + projectMaxIterations, + systemDefaults, +}: DefinitionAgentSectionProps) { + const [saved, setSaved] = useState(false); + const savedTimerRef = useRef | null>(null); + // Tracks whether a successful save is in flight (prevents config sync from clearing "Saved") + const justSavedRef = useRef(false); + + // Local form state — engine fields + const [model, setModel] = useState(config?.model ?? ''); + const [maxIterations, setMaxIterations] = useState(config?.maxIterations?.toString() ?? ''); + const [agentEngine, setAgentEngine] = useState(config?.agentEngine ?? 
''); + const [maxConcurrency, setMaxConcurrency] = useState(config?.maxConcurrency?.toString() ?? ''); + const [engineSettings, setEngineSettings] = useState< + Record> | undefined + >(config?.agentEngineSettings ?? undefined); + + // Local form state — prompt fields (initialized by AgentPromptOverrides component) + const [systemPrompt, setSystemPrompt] = useState(config?.systemPrompt ?? ''); + const [taskPrompt, setTaskPrompt] = useState(config?.taskPrompt ?? ''); + // Track whether the user explicitly cleared a prompt override so we can send null on save + // instead of the fallback display text (which would create a duplicate "custom" override). + const [systemPromptCleared, setSystemPromptCleared] = useState(false); + const [taskPromptCleared, setTaskPromptCleared] = useState(false); + + const effectiveEngineId = agentEngine || ''; + const effectiveEngine = engines.find((engine) => engine.id === effectiveEngineId); + + // Resolved inherited engine — project override or system default + const inheritedEngine = projectEngine ?? systemDefaults?.agentEngine ?? 'claude-code'; + // Per-field engine defaults for the EngineSettingsFields component + const engineDefaults = + systemDefaults && effectiveEngineId + ? systemDefaults.engineSettings[effectiveEngineId] + : undefined; + + // Resolved inherited model and iterations (walk the chain: project → system) + const inheritedModel = projectModel ?? systemDefaults?.model; + const inheritedMaxIterations = projectMaxIterations ?? systemDefaults?.maxIterations; + + // Sync form state when config changes (e.g. after invalidateQueries refetch) + // Skip clearing "Saved" if we just saved — the nonce effect will handle the timer + useEffect(() => { + setModel(config?.model ?? ''); + setMaxIterations(config?.maxIterations?.toString() ?? ''); + setAgentEngine(config?.agentEngine ?? ''); + setMaxConcurrency(config?.maxConcurrency?.toString() ?? ''); + setEngineSettings(config?.agentEngineSettings ?? 
undefined); + setSystemPrompt(config?.systemPrompt ?? ''); + setTaskPrompt(config?.taskPrompt ?? ''); + setSystemPromptCleared(false); + setTaskPromptCleared(false); + if (justSavedRef.current) { + justSavedRef.current = false; + } else { + setSaved(false); + } + }, [config]); + + // Show "Saved" indicator only after confirmed persistence (nonce increments on each success) + useEffect(() => { + if (saveSuccessNonce === 0) return; + // Mark that a save just completed so the config sync effect won't clear the indicator + justSavedRef.current = true; + if (savedTimerRef.current !== null) { + clearTimeout(savedTimerRef.current); + } + setSaved(true); + savedTimerRef.current = setTimeout(() => setSaved(false), 2000); + }, [saveSuccessNonce]); + + // Clean up the "Saved" timer on unmount to avoid state updates on unmounted component + useEffect(() => { + return () => { + if (savedTimerRef.current !== null) { + clearTimeout(savedTimerRef.current); + } + }; + }, []); + + // Group triggers by category and filter by active integrations + const triggersByCategory = useMemo(() => { + const groups: Record = { + pm: [], + scm: [], + internal: [], + }; + + for (const trigger of triggers) { + // Extract category from event (e.g., "pm:card-moved" -> "pm") + const [category] = trigger.event.split(':'); + if (category in groups) { + // Filter by provider if the trigger has provider restrictions + if (trigger.providers && trigger.providers.length > 0) { + const activeProvider = integrations[category as keyof typeof integrations]; + const matchesProvider = trigger.providers.some((p) => p === activeProvider); + if (!matchesProvider) continue; + } + groups[category].push(trigger); + } + } + + return groups; + }, [triggers, integrations]); + + const hasTriggers = + triggersByCategory.pm.length > 0 || + triggersByCategory.scm.length > 0 || + triggersByCategory.internal.length > 0; + + const handleSave = () => { + onSaveConfig(agentType, config?.id ?? 
null, { + model, + maxIterations, + agentEngine, + maxConcurrency, + engineSettings, + systemPrompt, + taskPrompt, + systemPromptCleared, + taskPromptCleared, + }); + }; + + const handleCancel = () => { + setModel(config?.model ?? ''); + setMaxIterations(config?.maxIterations?.toString() ?? ''); + setAgentEngine(config?.agentEngine ?? ''); + setMaxConcurrency(config?.maxConcurrency?.toString() ?? ''); + setEngineSettings(config?.agentEngineSettings ?? undefined); + setSystemPrompt(config?.systemPrompt ?? ''); + setTaskPrompt(config?.taskPrompt ?? ''); + setSystemPromptCleared(false); + setTaskPromptCleared(false); + }; + + const handleDelete = () => { + if (config && window.confirm('Delete this agent config?')) { + onDeleteConfig(config.id); + } + }; + + return ( +
+ + + Engine + Prompts + Triggers + + + {/* Engine Tab */} + +
+ + +
+
+ + +
+ {effectiveEngine && ( + + )} +
+
+ + setMaxIterations(e.target.value)} + placeholder={ + inheritedMaxIterations !== undefined + ? `${inheritedMaxIterations} (inherited)` + : 'Optional' + } + /> +
+
+ + setMaxConcurrency(e.target.value)} + placeholder="Optional" + /> +
+
+
+ + {/* Prompts Tab */} + + { + setSystemPrompt(v); + // User is editing manually — cancel any pending clear + setSystemPromptCleared(false); + }} + taskPrompt={taskPrompt} + onTaskPromptChange={(v) => { + setTaskPrompt(v); + // User is editing manually — cancel any pending clear + setTaskPromptCleared(false); + }} + onSystemPromptClear={() => setSystemPromptCleared(true)} + onTaskPromptClear={() => setTaskPromptCleared(true)} + /> + + + {/* Triggers Tab */} + + {(['pm', 'scm', 'internal'] as const).map((category) => { + const categoryTriggers = triggersByCategory[category]; + if (categoryTriggers.length === 0) return null; + + return ( +
+

+ {CATEGORY_LABELS[category] ?? category} Triggers +

+ onTriggerToggle(agentType, event, enabled)} + onParamChange={(event, params) => { + // Find the current trigger to get its enabled state + const currentTrigger = categoryTriggers.find((t) => t.event === event); + onTriggerParamChange(agentType, event, params, currentTrigger?.enabled ?? true); + }} + idPrefix={`${agentType}-${category}`} + /> +
+ ); + })} + + {!hasTriggers && ( +

+ No trigger configuration for this agent. +

+ )} +
+
+ + {/* Footer actions — outside tabs, applies globally */} +
+
+ + + {saved && Saved} +
+ {config && ( + + )} +
+
+ ); +} + +export function AgentDetailView({ + agentType, + projectId, + config, + triggers, + integrations, + engines, + isSaving, + onSaveConfig, + saveSuccessNonce, + onDeleteConfig, + onTriggerToggle, + onTriggerParamChange, + onBack, + projectModel, + projectEngine, + projectMaxIterations, + systemDefaults, +}: AgentDetailViewProps) { + const label = (AGENT_LABELS as Record)[agentType] ?? agentType; + + return ( +
+
+ +
+
+

{label}

+

+ Configure model, engine, and trigger settings for the {label} agent. +

+
+ { + onDeleteConfig(id); + onBack(); + }} + onTriggerToggle={onTriggerToggle} + onTriggerParamChange={onTriggerParamChange} + projectModel={projectModel} + projectEngine={projectEngine} + projectMaxIterations={projectMaxIterations} + systemDefaults={systemDefaults} + /> +
+ ); +} diff --git a/web/src/components/projects/agent-config-list.tsx b/web/src/components/projects/agent-config-list.tsx new file mode 100644 index 00000000..bb5c1d72 --- /dev/null +++ b/web/src/components/projects/agent-config-list.tsx @@ -0,0 +1,275 @@ +/** + * Agent list view components: AgentRow and AgentListView. + * Renders the table of configured agents and the list of available agents to enable. + */ +import { AlertTriangle, ChevronRight, Trash2 } from 'lucide-react'; +import { useState } from 'react'; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, +} from '@/components/ui/alert-dialog.js'; +import { Badge } from '@/components/ui/badge.js'; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from '@/components/ui/table.js'; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from '@/components/ui/tooltip.js'; +import { AGENT_LABELS } from '@/lib/trigger-agent-mapping.js'; +import type { AgentListViewProps, AgentRowProps } from './agent-config-types.js'; +import { countActiveTriggers, engineHasCredentials } from './agent-config-utils.js'; + +// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: table row with multiple computed display values (model, engine, trigger count) and layered inheritance fallbacks +export function AgentRow({ + type, + config, + triggers, + integrations, + onSelect, + onDeleteRequest, + projectModel, + projectEngine, + systemDefaults, + configuredCredentialKeys, +}: AgentRowProps) { + const label = (AGENT_LABELS as Record)[type] ?? type; + const activeTriggerCount = countActiveTriggers(triggers, integrations); + const modelInfo = config?.model ?? null; + const engineInfo = config?.agentEngine ?? 
null; + const hasCustomEngineSettings = + config?.agentEngineSettings != null && Object.keys(config.agentEngineSettings).length > 0; + + // Fallback display: show inherited model/engine when agent has no specific override + const inheritedModel = projectModel ?? systemDefaults?.model ?? null; + const inheritedEngine = projectEngine ?? systemDefaults?.agentEngine ?? null; + const displayModel = modelInfo ?? (inheritedModel ? `${inheritedModel} (inherited)` : null); + const displayEngine = engineInfo ?? (inheritedEngine ? `${inheritedEngine} (inherited)` : null); + + // Check if the agent's effective engine has credentials configured + // Only check when there is an explicit agent-level engine override + const agentEngineId = config?.agentEngine ?? null; + const hasMissingCredentials = + agentEngineId !== null && !engineHasCredentials(agentEngineId, configuredCredentialKeys); + + return ( + onSelect(type)}> + {label} + + {activeTriggerCount === 0 ? ( + + Inactive + + ) : config ? ( +
+ + Configured + + {hasMissingCredentials && ( + + + + + Missing credentials + + + + This agent uses the {agentEngineId} engine but no credentials are configured for + it. Configure credentials on the Harness tab. + + + )} +
+ ) : ( + + Default + + )} +
+ + {displayModel || displayEngine ? ( + + {displayEngine && {displayEngine}} + {displayEngine && displayModel && · } + {displayModel && {displayModel}} + {hasCustomEngineSettings && ( + + Custom settings + + )} + + ) : ( + + )} + + + {activeTriggerCount > 0 ? ( + {activeTriggerCount} active + ) : ( + + + + + None + + + + No triggers configured — this agent won't process any events + + + )} + + +
+ {config && ( + + )} + +
+
+
+ ); +} + +export function AgentListView({ + enabledAgentTypes, + availableAgentTypes, + configByAgent, + triggersByAgent, + integrations, + onSelect, + onDelete, + onEnable, + isDeleting, + isEnabling, + projectModel, + projectEngine, + systemDefaults, + configuredCredentialKeys, +}: AgentListViewProps) { + const [deleteTarget, setDeleteTarget] = useState<{ id: number; label: string } | null>(null); + + return ( + <> + {enabledAgentTypes.length === 0 ? ( +
+ No agents enabled. Enable agents below to start processing. +
+ ) : ( +
+ + + + + Agent + Status + Engine / Model + Active Triggers + + + + + {enabledAgentTypes.map((type) => ( + setDeleteTarget({ id, label })} + projectModel={projectModel} + projectEngine={projectEngine} + systemDefaults={systemDefaults} + configuredCredentialKeys={configuredCredentialKeys} + /> + ))} + +
+
+
+ )} + + {availableAgentTypes.length > 0 && ( +
+

Available Agents

+
+ {availableAgentTypes.map((agentType) => { + const label = + (AGENT_LABELS as Record)[agentType] ?? agentType; + return ( +
+ {label} + +
+ ); + })} +
+
+ )} + + !open && setDeleteTarget(null)}> + + + Delete Agent Config + + Are you sure you want to delete the config for {deleteTarget?.label}? + The agent will be disabled and no longer process any events. This action cannot be + undone. + + + + Cancel + { + if (deleteTarget) { + onDelete(deleteTarget.id); + setDeleteTarget(null); + } + }} + className="bg-destructive text-destructive-foreground hover:bg-destructive/90" + > + {isDeleting ? 'Deleting...' : 'Delete'} + + + + + + ); +} diff --git a/web/src/components/projects/agent-config-types.ts b/web/src/components/projects/agent-config-types.ts new file mode 100644 index 00000000..ac8d4ebd --- /dev/null +++ b/web/src/components/projects/agent-config-types.ts @@ -0,0 +1,167 @@ +/** + * Shared types for the agent configuration components. + * + * Extracted from project-agent-configs.tsx so each sub-module can import + * only what it needs without circular dependencies. + */ + +import type { ResolvedTrigger } from '@/components/shared/definition-trigger-toggles.js'; +import type { TriggerParameterValue } from '@/lib/trigger-agent-mapping.js'; + +export interface AgentConfig { + id: number; + agentType: string; + model: string | null; + maxIterations: number | null; + agentEngine: string | null; + agentEngineSettings: Record> | null; + maxConcurrency: number | null; + systemPrompt: string | null; + taskPrompt: string | null; +} + +interface EngineSettingFieldOption { + value: string; + label: string; +} + +export type EngineSettingField = + | { + key: string; + label: string; + type: 'select'; + description?: string; + options: EngineSettingFieldOption[]; + } + | { key: string; label: string; type: 'boolean'; description?: string } + | { + key: string; + label: string; + type: 'number'; + description?: string; + min?: number; + max?: number; + step?: number; + }; + +export interface Engine { + id: string; + label: string; + settings?: { + title?: string; + description?: string; + fields: EngineSettingField[]; + }; +} + 
+export interface SaveConfigValues { + model: string; + maxIterations: string; + agentEngine: string; + maxConcurrency: string; + engineSettings: Record> | undefined; + systemPrompt: string; + taskPrompt: string; + /** True when the user explicitly cleared the system prompt override (send null, not the fallback text). */ + systemPromptCleared: boolean; + /** True when the user explicitly cleared the task prompt override (send null, not the fallback text). */ + taskPromptCleared: boolean; +} + +export interface SystemDefaults { + model: string; + maxIterations: number; + agentEngine: string; + engineSettings: Record>; +} + +export interface DefinitionAgentSectionProps { + agentType: string; + projectId: string; + config: AgentConfig | null; + triggers: ResolvedTrigger[]; + integrations: { + pm: string | null; + scm: string | null; + }; + engines: Engine[]; + isSaving: boolean; + onSaveConfig: (agentType: string, configId: number | null, values: SaveConfigValues) => void; + saveSuccessNonce: number; + onDeleteConfig: (id: number) => void; + onTriggerToggle: (agentType: string, event: string, enabled: boolean) => void; + onTriggerParamChange: ( + agentType: string, + event: string, + parameters: Record, + currentEnabled: boolean, + ) => void; + /** Project-level model (null = use system default). */ + projectModel: string | null; + /** Project-level engine (null = use system default). */ + projectEngine: string | null; + /** Project-level maxIterations (null = use system default). */ + projectMaxIterations: number | null; + /** System-level defaults from the backend. */ + systemDefaults: SystemDefaults | undefined; +} + +export interface AgentRowProps { + type: string; + config: AgentConfig | null; + triggers: ResolvedTrigger[]; + integrations: { pm: string | null; scm: string | null }; + onSelect: (agentType: string) => void; + onDeleteRequest: (id: number, label: string) => void; + /** Project-level model to show as "inherited" when agent has no override. 
*/ + projectModel: string | null; + /** Project-level engine to show as "inherited" when agent has no override. */ + projectEngine: string | null; + /** System-level defaults. */ + systemDefaults: SystemDefaults | undefined; + /** Set of credential env-var keys that are configured for this project. */ + configuredCredentialKeys: Set; +} + +export interface AgentListViewProps { + enabledAgentTypes: string[]; + availableAgentTypes: string[]; + configByAgent: Map; + triggersByAgent: Map; + integrations: { pm: string | null; scm: string | null }; + onSelect: (agentType: string) => void; + onDelete: (id: number) => void; + onEnable: (agentType: string) => void; + isDeleting: boolean; + isEnabling: boolean; + projectModel: string | null; + projectEngine: string | null; + systemDefaults: SystemDefaults | undefined; + /** Set of credential env-var keys that are configured for this project. */ + configuredCredentialKeys: Set; +} + +export interface AgentDetailViewProps { + agentType: string; + projectId: string; + config: AgentConfig | null; + triggers: ResolvedTrigger[]; + integrations: { pm: string | null; scm: string | null }; + engines: Engine[]; + isSaving: boolean; + onSaveConfig: (agentType: string, configId: number | null, values: SaveConfigValues) => void; + saveSuccessNonce: number; + onDeleteConfig: (id: number) => void; + onTriggerToggle: (agentType: string, event: string, enabled: boolean) => void; + onTriggerParamChange: ( + agentType: string, + event: string, + parameters: Record, + currentEnabled: boolean, + ) => void; + onBack: () => void; + projectModel: string | null; + projectEngine: string | null; + projectMaxIterations: number | null; + systemDefaults: SystemDefaults | undefined; +} diff --git a/web/src/components/projects/agent-config-utils.ts b/web/src/components/projects/agent-config-utils.ts new file mode 100644 index 00000000..80296d2c --- /dev/null +++ b/web/src/components/projects/agent-config-utils.ts @@ -0,0 +1,40 @@ +/** + * Pure utility 
functions for agent configuration components. + * These functions are free of React and UI dependencies — easy to unit-test. + */ + +import type { ResolvedTrigger } from '../shared/definition-trigger-toggles.js'; +import { engineCredentialKeys } from './engine-secrets.js'; + +/** + * Returns true when the given engine has at least one credential key configured. + * Derived from ENGINE_SECRETS in engine-secrets.ts — no separate mapping to maintain. + * If the engine is not in the map, we conservatively assume credentials are present. + */ +export function engineHasCredentials( + engineId: string, + configuredCredentialKeys: Set, +): boolean { + const requiredKeys = engineCredentialKeys[engineId]; + if (!requiredKeys) return true; // Unknown engine — assume ok + return requiredKeys.some((key) => configuredCredentialKeys.has(key)); +} + +/** + * Counts the number of active triggers for an agent, filtering by provider + * when the trigger has provider restrictions. + */ +export function countActiveTriggers( + triggers: ResolvedTrigger[], + integrations: { pm: string | null; scm: string | null }, +): number { + return triggers.filter((t) => { + if (!t.enabled) return false; + const [category] = t.event.split(':'); + if (t.providers && t.providers.length > 0) { + const activeProvider = integrations[category as keyof typeof integrations]; + return t.providers.some((p) => p === activeProvider); + } + return true; + }).length; +} diff --git a/web/src/components/projects/integration-alerting-tab.tsx b/web/src/components/projects/integration-alerting-tab.tsx new file mode 100644 index 00000000..d36745ed --- /dev/null +++ b/web/src/components/projects/integration-alerting-tab.tsx @@ -0,0 +1,198 @@ +/** + * Alerting (Sentry) integration tab component. 
+ */ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { Trash2 } from 'lucide-react'; +import { useState } from 'react'; +import { Input } from '@/components/ui/input.js'; +import { Label } from '@/components/ui/label.js'; +import { API_URL } from '@/lib/api.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; +import { CopyButton } from './integration-scm-tab.js'; +import { ProjectSecretField } from './project-secret-field.js'; + +// ============================================================================ +// Alerting Tab (Sentry) +// ============================================================================ + +interface AlertingTabProps { + projectId: string; + alertingIntegration?: Record; +} + +export function AlertingTab({ projectId, alertingIntegration }: AlertingTabProps) { + const queryClient = useQueryClient(); + + const existingConfig = (alertingIntegration?.config as Record) ?? {}; + const [organizationSlug, setOrganizationSlug] = useState( + (existingConfig.organizationSlug as string) ?? '', + ); + + const [verifyResult, setVerifyResult] = useState<{ + id: string; + name: string; + slug: string; + } | null>(null); + const [verifyError, setVerifyError] = useState(null); + const [isVerifying, setIsVerifying] = useState(false); + + const callbackBaseUrl = + API_URL || + (typeof window !== 'undefined' ? window.location.origin.replace(':5173', ':3000') : ''); + + const sentryWebhookUrl = callbackBaseUrl + ? `${callbackBaseUrl}/sentry/webhook/${projectId}` + : `/sentry/webhook/${projectId}`; + + const credentialsQuery = useQuery(trpc.projects.credentials.list.queryOptions({ projectId })); + const credentials = credentialsQuery.data ?? 
[]; + const apiTokenCred = credentials.find((c) => c.envVarKey === 'SENTRY_API_TOKEN'); + const webhookSecretCred = credentials.find((c) => c.envVarKey === 'SENTRY_WEBHOOK_SECRET'); + + const handleVerify = async (rawToken: string) => { + if (!rawToken) { + setVerifyError('Enter the API token value to verify it'); + return; + } + if (!organizationSlug) { + setVerifyError('Enter the organization slug to verify it'); + return; + } + setIsVerifying(true); + setVerifyError(null); + setVerifyResult(null); + try { + const result = await trpcClient.integrationsDiscovery.verifySentry.mutate({ + apiToken: rawToken, + organizationSlug, + }); + setVerifyResult(result); + } catch (err) { + setVerifyError(err instanceof Error ? err.message : String(err)); + } finally { + setIsVerifying(false); + } + }; + + const saveMutation = useMutation({ + mutationFn: async () => { + return trpcClient.projects.integrations.upsert.mutate({ + projectId, + category: 'alerting', + provider: 'sentry', + config: { organizationSlug }, + }); + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.projects.integrations.list.queryOptions({ projectId }).queryKey, + }); + }, + }); + + const deleteMutation = useMutation({ + mutationFn: async () => { + return trpcClient.projects.integrations.delete.mutate({ + projectId, + category: 'alerting', + }); + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.projects.integrations.list.queryOptions({ projectId }).queryKey, + }); + }, + }); + + return ( +
+ {/* Organization Slug */} +
+ +

+ Your Sentry organization slug (found in your Sentry URL:{' '} + sentry.io/organizations/<slug>/). +

+ setOrganizationSlug(e.target.value)} + placeholder="my-organization" + /> +
+ +
+ + {/* Credentials */} +
+ + + +
+ +
+ + {/* Sentry Webhook URL */} +
+ +

+ Configure this URL in your Sentry project's webhook settings to receive alerts. +

+
+ {sentryWebhookUrl} + +
+
+ +
+ + {/* Save / Delete */} +
+ + {saveMutation.isSuccess && Saved} + {saveMutation.isError && ( + {saveMutation.error.message} + )} + {alertingIntegration && ( + + )} + {deleteMutation.isError && ( + {deleteMutation.error.message} + )} +
+
+ ); +} diff --git a/web/src/components/projects/integration-form.tsx b/web/src/components/projects/integration-form.tsx index cc69f51e..328bed69 100644 --- a/web/src/components/projects/integration-form.tsx +++ b/web/src/components/projects/integration-form.tsx @@ -1,618 +1,12 @@ -import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { - AlertCircle, - AlertTriangle, - Check, - Clipboard, - ExternalLink, - Info, - Loader2, - RefreshCw, - Trash2, -} from 'lucide-react'; -import { useEffect, useState } from 'react'; -import { Input } from '@/components/ui/input.js'; -import { Label } from '@/components/ui/label.js'; -import { API_URL } from '@/lib/api.js'; -import { trpc, trpcClient } from '@/lib/trpc.js'; +import { useQuery } from '@tanstack/react-query'; +import { useState } from 'react'; +import { trpc } from '@/lib/trpc.js'; +import { AlertingTab } from './integration-alerting-tab.js'; +import { SCMTab } from './integration-scm-tab.js'; import { PMWizard } from './pm-wizard.js'; -import { ProjectSecretField } from './project-secret-field.js'; type IntegrationCategory = 'pm' | 'scm' | 'alerting'; -// ============================================================================ -// GitHub Credential Slots (replaces the old CredentialSelector dropdowns) -// ============================================================================ - -function GitHubCredentialSlots({ projectId }: { projectId: string }) { - const credentialsQuery = useQuery(trpc.projects.credentials.list.queryOptions({ projectId })); - - const [verifiedLogins, setVerifiedLogins] = useState>({}); - const [verifyErrors, setVerifyErrors] = useState>({}); - const [verifyingRoles, setVerifyingRoles] = useState>({}); - - const credentials = credentialsQuery.data ?? 
[]; - const implementerCred = credentials.find((c) => c.envVarKey === 'GITHUB_TOKEN_IMPLEMENTER'); - const reviewerCred = credentials.find((c) => c.envVarKey === 'GITHUB_TOKEN_REVIEWER'); - - const handleVerify = async (role: string, rawValue: string) => { - // If no new value entered, we can't verify (we never return plaintext to browser) - if (!rawValue) { - setVerifyErrors((prev) => ({ - ...prev, - [role]: 'Enter the token value to verify it', - })); - return; - } - setVerifyingRoles((prev) => ({ ...prev, [role]: true })); - try { - const result = await trpcClient.integrationsDiscovery.verifyGithubToken.mutate({ - token: rawValue, - }); - setVerifiedLogins((prev) => ({ ...prev, [role]: result.login })); - setVerifyErrors((prev) => ({ ...prev, [role]: null })); - } catch (err) { - setVerifiedLogins((prev) => ({ ...prev, [role]: null })); - setVerifyErrors((prev) => ({ - ...prev, - [role]: err instanceof Error ? err.message : String(err), - })); - } finally { - setVerifyingRoles((prev) => ({ ...prev, [role]: false })); - } - }; - - return ( -
- - handleVerify('implementer', val)} - isVerifying={verifyingRoles.implementer} - verifyError={verifyErrors.implementer} - /> - handleVerify('reviewer', val)} - isVerifying={verifyingRoles.reviewer} - verifyError={verifyErrors.reviewer} - /> -
- ); -} - -// ============================================================================ -// GitHub Webhook Management -// ============================================================================ - -function CopyButton({ text }: { text: string }) { - const [copied, setCopied] = useState(false); - const handleCopy = async () => { - await navigator.clipboard.writeText(text); - setCopied(true); - setTimeout(() => setCopied(false), 2000); - }; - return ( - - ); -} - -function GitHubWebhookSection({ projectId }: { projectId: string }) { - const queryClient = useQueryClient(); - - const callbackBaseUrl = - API_URL || - (typeof window !== 'undefined' ? window.location.origin.replace(':5173', ':3000') : ''); - - const webhooksQuery = useQuery(trpc.webhooks.list.queryOptions({ projectId })); - - const createGithubWebhookMutation = useMutation({ - mutationFn: () => - trpcClient.webhooks.create.mutate({ - projectId, - callbackBaseUrl, - githubOnly: true, - }), - onSuccess: () => { - queryClient.invalidateQueries({ - queryKey: trpc.webhooks.list.queryOptions({ projectId }).queryKey, - }); - }, - }); - - const deleteGithubWebhookMutation = useMutation({ - mutationFn: (deleteCallbackBaseUrl: string) => - trpcClient.webhooks.delete.mutate({ - projectId, - callbackBaseUrl: deleteCallbackBaseUrl, - githubOnly: true, - }), - onSuccess: () => { - queryClient.invalidateQueries({ - queryKey: trpc.webhooks.list.queryOptions({ projectId }).queryKey, - }); - }, - }); - - const activeGithubWebhooks = (webhooksQuery.data?.github ?? []).map((w) => ({ - id: String(w.id), - url: w.config.url ?? '', - active: w.active, - })); - - const webhookCallbackUrl = callbackBaseUrl - ? 
`${callbackBaseUrl}/github/webhook` - : '/github/webhook'; - const githubCurlCommand = [ - 'curl -X POST "https://api.github.com/repos///hooks" \\', - ' -H "Authorization: Bearer " \\', - ' -H "Content-Type: application/json" \\', - " -d '{", - ' "name": "web",', - ' "active": true,', - ' "events": ["push", "pull_request", "check_suite", "pull_request_review"],', - ' "config": {', - ` "url": "${webhookCallbackUrl}",`, - ' "content_type": "json"', - ' }', - " }'", - ].join('\n'); - - return ( -
-
- -

- Manage GitHub webhooks for receiving push events, PR updates, and CI status notifications. -

-
- - {/* GitHub-specific error */} - {webhooksQuery.data?.errors?.github && ( -
- -
- GitHub - - : {String(webhooksQuery.data.errors.github)} - -
- -
- )} - - {/* Active webhooks list */} - {webhooksQuery.isLoading ? ( -
- Loading webhooks... -
- ) : activeGithubWebhooks.length > 0 ? ( -
- {activeGithubWebhooks.map((w) => ( -
-
- - {w.url} -
- -
- ))} -
- ) : ( -
- - No GitHub webhooks configured for this project. -
- )} - - {/* curl instructions for manual GitHub webhook creation (collapsible) */} -
- - -

- Manual webhook creation (alternative: if the button below doesn't work) -

-
-
-

- Use the following curl command to create the GitHub webhook manually. Requires a token - with admin:repo_hook scope. -

-
-
- -
-
-							{githubCurlCommand}
-						
-
-
-
- - {/* Create webhook button */} -
- - {createGithubWebhookMutation.isError && ( -

{createGithubWebhookMutation.error.message}

- )} - {createGithubWebhookMutation.isSuccess && ( -

- GitHub webhook created successfully. -

- )} -
-
- ); -} - -// ============================================================================ -// SCM Tab (GitHub) -// ============================================================================ - -interface SCMTabProject { - repo?: string | null; - baseBranch?: string | null; - branchPrefix?: string | null; -} - -function SCMTab({ projectId, project }: { projectId: string; project?: SCMTabProject }) { - const queryClient = useQueryClient(); - - // Project-level SCM fields - const [repo, setRepo] = useState(project?.repo ?? ''); - const [baseBranch, setBaseBranch] = useState(project?.baseBranch ?? 'main'); - const [branchPrefix, setBranchPrefix] = useState(project?.branchPrefix ?? 'feature/'); - - useEffect(() => { - setRepo(project?.repo ?? ''); - setBaseBranch(project?.baseBranch ?? 'main'); - setBranchPrefix(project?.branchPrefix ?? 'feature/'); - }, [project?.repo, project?.baseBranch, project?.branchPrefix]); - - const saveMutation = useMutation({ - mutationFn: async () => { - // Save project-level SCM fields - await trpcClient.projects.update.mutate({ - id: projectId, - repo: repo || undefined, - baseBranch, - branchPrefix, - }); - - // Note: triggers are intentionally omitted — they are managed via the Agent Configs tab - const result = await trpcClient.projects.integrations.upsert.mutate({ - projectId, - category: 'scm', - provider: 'github', - config: {}, - }); - - return result; - }, - onSuccess: () => { - queryClient.invalidateQueries({ - queryKey: trpc.projects.getById.queryOptions({ id: projectId }).queryKey, - }); - queryClient.invalidateQueries({ - queryKey: trpc.projects.listFull.queryOptions().queryKey, - }); - queryClient.invalidateQueries({ - queryKey: trpc.projects.integrations.list.queryOptions({ projectId }).queryKey, - }); - }, - }); - - return ( -
- {/* Repository Settings */} -
- -
- - setRepo(e.target.value)} - placeholder="owner/repo" - /> -
-
-
- - setBaseBranch(e.target.value)} - placeholder="main" - /> -
-
- - setBranchPrefix(e.target.value)} - placeholder="feature/" - /> -
-
-
- -
- -

- CASCADE uses two separate GitHub bot accounts to prevent feedback loops. The{' '} - implementer writes code and creates PRs. The reviewer{' '} - reviews PRs and can approve or request changes. -

- - - -

- Trigger configuration has moved to the Agents tab. -

- -
- - {saveMutation.isSuccess && Saved} - {saveMutation.isError && ( - {saveMutation.error.message} - )} -
- -
- - -
- ); -} - -// ============================================================================ -// Alerting Tab (Sentry) -// ============================================================================ - -interface AlertingTabProps { - projectId: string; - alertingIntegration?: Record; -} - -function AlertingTab({ projectId, alertingIntegration }: AlertingTabProps) { - const queryClient = useQueryClient(); - - const existingConfig = (alertingIntegration?.config as Record) ?? {}; - const [organizationSlug, setOrganizationSlug] = useState( - (existingConfig.organizationSlug as string) ?? '', - ); - - const [verifyResult, setVerifyResult] = useState<{ - id: string; - name: string; - slug: string; - } | null>(null); - const [verifyError, setVerifyError] = useState(null); - const [isVerifying, setIsVerifying] = useState(false); - - const callbackBaseUrl = - API_URL || - (typeof window !== 'undefined' ? window.location.origin.replace(':5173', ':3000') : ''); - - const sentryWebhookUrl = callbackBaseUrl - ? `${callbackBaseUrl}/sentry/webhook/${projectId}` - : `/sentry/webhook/${projectId}`; - - const credentialsQuery = useQuery(trpc.projects.credentials.list.queryOptions({ projectId })); - const credentials = credentialsQuery.data ?? []; - const apiTokenCred = credentials.find((c) => c.envVarKey === 'SENTRY_API_TOKEN'); - const webhookSecretCred = credentials.find((c) => c.envVarKey === 'SENTRY_WEBHOOK_SECRET'); - - const handleVerify = async (rawToken: string) => { - if (!rawToken) { - setVerifyError('Enter the API token value to verify it'); - return; - } - if (!organizationSlug) { - setVerifyError('Enter the organization slug to verify it'); - return; - } - setIsVerifying(true); - setVerifyError(null); - setVerifyResult(null); - try { - const result = await trpcClient.integrationsDiscovery.verifySentry.mutate({ - apiToken: rawToken, - organizationSlug, - }); - setVerifyResult(result); - } catch (err) { - setVerifyError(err instanceof Error ? 
err.message : String(err)); - } finally { - setIsVerifying(false); - } - }; - - const saveMutation = useMutation({ - mutationFn: async () => { - return trpcClient.projects.integrations.upsert.mutate({ - projectId, - category: 'alerting', - provider: 'sentry', - config: { organizationSlug }, - }); - }, - onSuccess: () => { - queryClient.invalidateQueries({ - queryKey: trpc.projects.integrations.list.queryOptions({ projectId }).queryKey, - }); - }, - }); - - const deleteMutation = useMutation({ - mutationFn: async () => { - return trpcClient.projects.integrations.delete.mutate({ - projectId, - category: 'alerting', - }); - }, - onSuccess: () => { - queryClient.invalidateQueries({ - queryKey: trpc.projects.integrations.list.queryOptions({ projectId }).queryKey, - }); - }, - }); - - return ( -
- {/* Organization Slug */} -
- -

- Your Sentry organization slug (found in your Sentry URL:{' '} - sentry.io/organizations/<slug>/). -

- setOrganizationSlug(e.target.value)} - placeholder="my-organization" - /> -
- -
- - {/* Credentials */} -
- - - -
- -
- - {/* Sentry Webhook URL */} -
- -

- Configure this URL in your Sentry project's webhook settings to receive alerts. -

-
- {sentryWebhookUrl} - -
-
- -
- - {/* Save / Delete */} -
- - {saveMutation.isSuccess && Saved} - {saveMutation.isError && ( - {saveMutation.error.message} - )} - {alertingIntegration && ( - - )} - {deleteMutation.isError && ( - {deleteMutation.error.message} - )} -
-
- ); -} - // ============================================================================ // Helpers // ============================================================================ diff --git a/web/src/components/projects/integration-scm-tab.tsx b/web/src/components/projects/integration-scm-tab.tsx new file mode 100644 index 00000000..81233f54 --- /dev/null +++ b/web/src/components/projects/integration-scm-tab.tsx @@ -0,0 +1,434 @@ +/** + * SCM (GitHub) integration tab components. + * Contains: CopyButton, GitHubCredentialSlots, GitHubWebhookSection, SCMTab. + * CopyButton is co-located here and also exported for use by AlertingTab. + */ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import { + AlertCircle, + AlertTriangle, + Check, + Clipboard, + ExternalLink, + Info, + Loader2, + RefreshCw, + Trash2, +} from 'lucide-react'; +import { useEffect, useState } from 'react'; +import { Input } from '@/components/ui/input.js'; +import { Label } from '@/components/ui/label.js'; +import { API_URL } from '@/lib/api.js'; +import { trpc, trpcClient } from '@/lib/trpc.js'; +import { ProjectSecretField } from './project-secret-field.js'; + +// ============================================================================ +// CopyButton (shared with AlertingTab) +// ============================================================================ + +export function CopyButton({ text }: { text: string }) { + const [copied, setCopied] = useState(false); + const handleCopy = async () => { + await navigator.clipboard.writeText(text); + setCopied(true); + setTimeout(() => setCopied(false), 2000); + }; + return ( + + ); +} + +// ============================================================================ +// GitHub Credential Slots (replaces the old CredentialSelector dropdowns) +// ============================================================================ + +function GitHubCredentialSlots({ projectId }: { projectId: string }) { + const credentialsQuery = 
useQuery(trpc.projects.credentials.list.queryOptions({ projectId })); + + const [verifiedLogins, setVerifiedLogins] = useState>({}); + const [verifyErrors, setVerifyErrors] = useState>({}); + const [verifyingRoles, setVerifyingRoles] = useState>({}); + + const credentials = credentialsQuery.data ?? []; + const implementerCred = credentials.find((c) => c.envVarKey === 'GITHUB_TOKEN_IMPLEMENTER'); + const reviewerCred = credentials.find((c) => c.envVarKey === 'GITHUB_TOKEN_REVIEWER'); + + const handleVerify = async (role: string, rawValue: string) => { + // If no new value entered, we can't verify (we never return plaintext to browser) + if (!rawValue) { + setVerifyErrors((prev) => ({ + ...prev, + [role]: 'Enter the token value to verify it', + })); + return; + } + setVerifyingRoles((prev) => ({ ...prev, [role]: true })); + try { + const result = await trpcClient.integrationsDiscovery.verifyGithubToken.mutate({ + token: rawValue, + }); + setVerifiedLogins((prev) => ({ ...prev, [role]: result.login })); + setVerifyErrors((prev) => ({ ...prev, [role]: null })); + } catch (err) { + setVerifiedLogins((prev) => ({ ...prev, [role]: null })); + setVerifyErrors((prev) => ({ + ...prev, + [role]: err instanceof Error ? err.message : String(err), + })); + } finally { + setVerifyingRoles((prev) => ({ ...prev, [role]: false })); + } + }; + + return ( +
+ + handleVerify('implementer', val)} + isVerifying={verifyingRoles.implementer} + verifyError={verifyErrors.implementer} + /> + handleVerify('reviewer', val)} + isVerifying={verifyingRoles.reviewer} + verifyError={verifyErrors.reviewer} + /> +
+ ); +} + +// ============================================================================ +// GitHub Webhook Management +// ============================================================================ + +function GitHubWebhookSection({ projectId }: { projectId: string }) { + const queryClient = useQueryClient(); + + const callbackBaseUrl = + API_URL || + (typeof window !== 'undefined' ? window.location.origin.replace(':5173', ':3000') : ''); + + const webhooksQuery = useQuery(trpc.webhooks.list.queryOptions({ projectId })); + + const createGithubWebhookMutation = useMutation({ + mutationFn: () => + trpcClient.webhooks.create.mutate({ + projectId, + callbackBaseUrl, + githubOnly: true, + }), + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.webhooks.list.queryOptions({ projectId }).queryKey, + }); + }, + }); + + const deleteGithubWebhookMutation = useMutation({ + mutationFn: (deleteCallbackBaseUrl: string) => + trpcClient.webhooks.delete.mutate({ + projectId, + callbackBaseUrl: deleteCallbackBaseUrl, + githubOnly: true, + }), + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.webhooks.list.queryOptions({ projectId }).queryKey, + }); + }, + }); + + const activeGithubWebhooks = (webhooksQuery.data?.github ?? []).map((w) => ({ + id: String(w.id), + url: w.config.url ?? '', + active: w.active, + })); + + const webhookCallbackUrl = callbackBaseUrl + ? `${callbackBaseUrl}/github/webhook` + : '/github/webhook'; + const githubCurlCommand = [ + 'curl -X POST "https://api.github.com/repos///hooks" \\', + ' -H "Authorization: Bearer " \\', + ' -H "Content-Type: application/json" \\', + " -d '{", + ' "name": "web",', + ' "active": true,', + ' "events": ["push", "pull_request", "check_suite", "pull_request_review"],', + ' "config": {', + ` "url": "${webhookCallbackUrl}",`, + ' "content_type": "json"', + ' }', + " }'", + ].join('\n'); + + return ( +
+
+ +

+ Manage GitHub webhooks for receiving push events, PR updates, and CI status notifications. +

+
+ + {/* GitHub-specific error */} + {webhooksQuery.data?.errors?.github && ( +
+ +
+ GitHub + + : {String(webhooksQuery.data.errors.github)} + +
+ +
+ )} + + {/* Active webhooks list */} + {webhooksQuery.isLoading ? ( +
+ Loading webhooks... +
+ ) : activeGithubWebhooks.length > 0 ? ( +
+ {activeGithubWebhooks.map((w) => ( +
+
+ + {w.url} +
+ +
+ ))} +
+ ) : ( +
+ + No GitHub webhooks configured for this project. +
+ )} + + {/* curl instructions for manual GitHub webhook creation (collapsible) */} +
+ + +

+ Manual webhook creation (alternative: if the button below doesn't work) +

+
+
+

+ Use the following curl command to create the GitHub webhook manually. Requires a token + with admin:repo_hook scope. +

+
+
+ +
+
+							{githubCurlCommand}
+						
+
+
+
+ + {/* Create webhook button */} +
+ + {createGithubWebhookMutation.isError && ( +

{createGithubWebhookMutation.error.message}

+ )} + {createGithubWebhookMutation.isSuccess && ( +

+ GitHub webhook created successfully. +

+ )} +
+
+ ); +} + +// ============================================================================ +// SCM Tab (GitHub) +// ============================================================================ + +interface SCMTabProject { + repo?: string | null; + baseBranch?: string | null; + branchPrefix?: string | null; +} + +export function SCMTab({ projectId, project }: { projectId: string; project?: SCMTabProject }) { + const queryClient = useQueryClient(); + + // Project-level SCM fields + const [repo, setRepo] = useState(project?.repo ?? ''); + const [baseBranch, setBaseBranch] = useState(project?.baseBranch ?? 'main'); + const [branchPrefix, setBranchPrefix] = useState(project?.branchPrefix ?? 'feature/'); + + useEffect(() => { + setRepo(project?.repo ?? ''); + setBaseBranch(project?.baseBranch ?? 'main'); + setBranchPrefix(project?.branchPrefix ?? 'feature/'); + }, [project?.repo, project?.baseBranch, project?.branchPrefix]); + + const saveMutation = useMutation({ + mutationFn: async () => { + // Save project-level SCM fields + await trpcClient.projects.update.mutate({ + id: projectId, + repo: repo || undefined, + baseBranch, + branchPrefix, + }); + + // Note: triggers are intentionally omitted — they are managed via the Agent Configs tab + const result = await trpcClient.projects.integrations.upsert.mutate({ + projectId, + category: 'scm', + provider: 'github', + config: {}, + }); + + return result; + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: trpc.projects.getById.queryOptions({ id: projectId }).queryKey, + }); + queryClient.invalidateQueries({ + queryKey: trpc.projects.listFull.queryOptions().queryKey, + }); + queryClient.invalidateQueries({ + queryKey: trpc.projects.integrations.list.queryOptions({ projectId }).queryKey, + }); + }, + }); + + return ( +
+ {/* Repository Settings */} +
+ +
+ + setRepo(e.target.value)} + placeholder="owner/repo" + /> +
+
+
+ + setBaseBranch(e.target.value)} + placeholder="main" + /> +
+
+ + setBranchPrefix(e.target.value)} + placeholder="feature/" + /> +
+
+
+ +
+ +

+ CASCADE uses two separate GitHub bot accounts to prevent feedback loops. The{' '} + implementer writes code and creates PRs. The reviewer{' '} + reviews PRs and can approve or request changes. +

+ + + +

+ Trigger configuration has moved to the Agents tab. +

+ +
+ + {saveMutation.isSuccess && Saved} + {saveMutation.isError && ( + {saveMutation.error.message} + )} +
+ +
+ + +
+ ); +} diff --git a/web/src/components/projects/project-agent-configs.tsx b/web/src/components/projects/project-agent-configs.tsx index be9d3ede..34d6787c 100644 --- a/web/src/components/projects/project-agent-configs.tsx +++ b/web/src/components/projects/project-agent-configs.tsx @@ -1,882 +1,17 @@ import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; -import { AlertTriangle, ArrowLeft, ChevronRight, Trash2 } from 'lucide-react'; -import { useEffect, useMemo, useRef, useState } from 'react'; +import { useState } from 'react'; import { toast } from 'sonner'; -import { engineCredentialKeys } from '@/components/projects/engine-secrets.js'; -import { EngineSettingsFields } from '@/components/settings/engine-settings-fields.js'; -import { ModelField } from '@/components/settings/model-field.js'; -import { - DefinitionTriggerToggles, - type ResolvedTrigger, -} from '@/components/shared/definition-trigger-toggles.js'; -import { - AlertDialog, - AlertDialogAction, - AlertDialogCancel, - AlertDialogContent, - AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from '@/components/ui/alert-dialog.js'; -import { Badge } from '@/components/ui/badge.js'; -import { Input } from '@/components/ui/input.js'; -import { Label } from '@/components/ui/label.js'; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from '@/components/ui/select.js'; -import { - Table, - TableBody, - TableCell, - TableHead, - TableHeader, - TableRow, -} from '@/components/ui/table.js'; -import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs.js'; -import { - Tooltip, - TooltipContent, - TooltipProvider, - TooltipTrigger, -} from '@/components/ui/tooltip.js'; -import { - AGENT_LABELS, - CATEGORY_LABELS, - type TriggerParameterValue, -} from '@/lib/trigger-agent-mapping.js'; +import type { ResolvedTrigger } from '@/components/shared/definition-trigger-toggles.js'; +import type { 
TriggerParameterValue } from '@/lib/trigger-agent-mapping.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import { AgentPromptOverrides } from './agent-prompt-overrides.js'; - -interface AgentConfig { - id: number; - agentType: string; - model: string | null; - maxIterations: number | null; - agentEngine: string | null; - agentEngineSettings: Record> | null; - maxConcurrency: number | null; - systemPrompt: string | null; - taskPrompt: string | null; -} - -interface EngineSettingFieldOption { - value: string; - label: string; -} - -type EngineSettingField = - | { - key: string; - label: string; - type: 'select'; - description?: string; - options: EngineSettingFieldOption[]; - } - | { key: string; label: string; type: 'boolean'; description?: string } - | { - key: string; - label: string; - type: 'number'; - description?: string; - min?: number; - max?: number; - step?: number; - }; - -interface Engine { - id: string; - label: string; - settings?: { - title?: string; - description?: string; - fields: EngineSettingField[]; - }; -} - -// ============================================================================ -// Definition-Based Agent Section (New) -// ============================================================================ - -interface SaveConfigValues { - model: string; - maxIterations: string; - agentEngine: string; - maxConcurrency: string; - engineSettings: Record> | undefined; - systemPrompt: string; - taskPrompt: string; - /** True when the user explicitly cleared the system prompt override (send null, not the fallback text). */ - systemPromptCleared: boolean; - /** True when the user explicitly cleared the task prompt override (send null, not the fallback text). 
*/ - taskPromptCleared: boolean; -} - -interface SystemDefaults { - model: string; - maxIterations: number; - agentEngine: string; - engineSettings: Record>; -} - -interface DefinitionAgentSectionProps { - agentType: string; - projectId: string; - config: AgentConfig | null; - triggers: ResolvedTrigger[]; - integrations: { - pm: string | null; - scm: string | null; - }; - engines: Engine[]; - isSaving: boolean; - onSaveConfig: (agentType: string, configId: number | null, values: SaveConfigValues) => void; - saveSuccessNonce: number; - onDeleteConfig: (id: number) => void; - onTriggerToggle: (agentType: string, event: string, enabled: boolean) => void; - onTriggerParamChange: ( - agentType: string, - event: string, - parameters: Record, - currentEnabled: boolean, - ) => void; - /** Project-level model (null = use system default). */ - projectModel: string | null; - /** Project-level engine (null = use system default). */ - projectEngine: string | null; - /** Project-level maxIterations (null = use system default). */ - projectMaxIterations: number | null; - /** System-level defaults from the backend. 
*/ - systemDefaults: SystemDefaults | undefined; -} - -// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: tabbed detail panel managing Engine/Prompts/Triggers tabs with per-tab state, mutations, and trigger category grouping -function DefinitionAgentSection({ - agentType, - projectId, - config, - triggers, - integrations, - engines, - isSaving, - onSaveConfig, - saveSuccessNonce, - onDeleteConfig, - onTriggerToggle, - onTriggerParamChange, - projectModel, - projectEngine, - projectMaxIterations, - systemDefaults, -}: DefinitionAgentSectionProps) { - const [saved, setSaved] = useState(false); - const savedTimerRef = useRef | null>(null); - // Tracks whether a successful save is in flight (prevents config sync from clearing "Saved") - const justSavedRef = useRef(false); - - // Local form state — engine fields - const [model, setModel] = useState(config?.model ?? ''); - const [maxIterations, setMaxIterations] = useState(config?.maxIterations?.toString() ?? ''); - const [agentEngine, setAgentEngine] = useState(config?.agentEngine ?? ''); - const [maxConcurrency, setMaxConcurrency] = useState(config?.maxConcurrency?.toString() ?? ''); - const [engineSettings, setEngineSettings] = useState< - Record> | undefined - >(config?.agentEngineSettings ?? undefined); - - // Local form state — prompt fields (initialized by AgentPromptOverrides component) - const [systemPrompt, setSystemPrompt] = useState(config?.systemPrompt ?? ''); - const [taskPrompt, setTaskPrompt] = useState(config?.taskPrompt ?? ''); - // Track whether the user explicitly cleared a prompt override so we can send null on save - // instead of the fallback display text (which would create a duplicate "custom" override). 
- const [systemPromptCleared, setSystemPromptCleared] = useState(false); - const [taskPromptCleared, setTaskPromptCleared] = useState(false); - - const effectiveEngineId = agentEngine || ''; - const effectiveEngine = engines.find((engine) => engine.id === effectiveEngineId); - - // Resolved inherited engine — project override or system default - const inheritedEngine = projectEngine ?? systemDefaults?.agentEngine ?? 'claude-code'; - // Per-field engine defaults for the EngineSettingsFields component - const engineDefaults = - systemDefaults && effectiveEngineId - ? systemDefaults.engineSettings[effectiveEngineId] - : undefined; - - // Resolved inherited model and iterations (walk the chain: project → system) - const inheritedModel = projectModel ?? systemDefaults?.model; - const inheritedMaxIterations = projectMaxIterations ?? systemDefaults?.maxIterations; - - // Sync form state when config changes (e.g. after invalidateQueries refetch) - // Skip clearing "Saved" if we just saved — the nonce effect will handle the timer - useEffect(() => { - setModel(config?.model ?? ''); - setMaxIterations(config?.maxIterations?.toString() ?? ''); - setAgentEngine(config?.agentEngine ?? ''); - setMaxConcurrency(config?.maxConcurrency?.toString() ?? ''); - setEngineSettings(config?.agentEngineSettings ?? undefined); - setSystemPrompt(config?.systemPrompt ?? ''); - setTaskPrompt(config?.taskPrompt ?? 
''); - setSystemPromptCleared(false); - setTaskPromptCleared(false); - if (justSavedRef.current) { - justSavedRef.current = false; - } else { - setSaved(false); - } - }, [config]); - - // Show "Saved" indicator only after confirmed persistence (nonce increments on each success) - useEffect(() => { - if (saveSuccessNonce === 0) return; - // Mark that a save just completed so the config sync effect won't clear the indicator - justSavedRef.current = true; - if (savedTimerRef.current !== null) { - clearTimeout(savedTimerRef.current); - } - setSaved(true); - savedTimerRef.current = setTimeout(() => setSaved(false), 2000); - }, [saveSuccessNonce]); - - // Clean up the "Saved" timer on unmount to avoid state updates on unmounted component - useEffect(() => { - return () => { - if (savedTimerRef.current !== null) { - clearTimeout(savedTimerRef.current); - } - }; - }, []); - - // Group triggers by category and filter by active integrations - const triggersByCategory = useMemo(() => { - const groups: Record = { - pm: [], - scm: [], - internal: [], - }; - - for (const trigger of triggers) { - // Extract category from event (e.g., "pm:card-moved" -> "pm") - const [category] = trigger.event.split(':'); - if (category in groups) { - // Filter by provider if the trigger has provider restrictions - if (trigger.providers && trigger.providers.length > 0) { - const activeProvider = integrations[category as keyof typeof integrations]; - const matchesProvider = trigger.providers.some((p) => p === activeProvider); - if (!matchesProvider) continue; - } - groups[category].push(trigger); - } - } - - return groups; - }, [triggers, integrations]); - - const hasTriggers = - triggersByCategory.pm.length > 0 || - triggersByCategory.scm.length > 0 || - triggersByCategory.internal.length > 0; - - const handleSave = () => { - onSaveConfig(agentType, config?.id ?? 
null, { - model, - maxIterations, - agentEngine, - maxConcurrency, - engineSettings, - systemPrompt, - taskPrompt, - systemPromptCleared, - taskPromptCleared, - }); - }; - - const handleCancel = () => { - setModel(config?.model ?? ''); - setMaxIterations(config?.maxIterations?.toString() ?? ''); - setAgentEngine(config?.agentEngine ?? ''); - setMaxConcurrency(config?.maxConcurrency?.toString() ?? ''); - setEngineSettings(config?.agentEngineSettings ?? undefined); - setSystemPrompt(config?.systemPrompt ?? ''); - setTaskPrompt(config?.taskPrompt ?? ''); - setSystemPromptCleared(false); - setTaskPromptCleared(false); - }; - - const handleDelete = () => { - if (config && window.confirm('Delete this agent config?')) { - onDeleteConfig(config.id); - } - }; - - return ( -
- - - Engine - Prompts - Triggers - - - {/* Engine Tab */} - -
- - -
-
- - -
- {effectiveEngine && ( - - )} -
-
- - setMaxIterations(e.target.value)} - placeholder={ - inheritedMaxIterations !== undefined - ? `${inheritedMaxIterations} (inherited)` - : 'Optional' - } - /> -
-
- - setMaxConcurrency(e.target.value)} - placeholder="Optional" - /> -
-
-
- - {/* Prompts Tab */} - - { - setSystemPrompt(v); - // User is editing manually — cancel any pending clear - setSystemPromptCleared(false); - }} - taskPrompt={taskPrompt} - onTaskPromptChange={(v) => { - setTaskPrompt(v); - // User is editing manually — cancel any pending clear - setTaskPromptCleared(false); - }} - onSystemPromptClear={() => setSystemPromptCleared(true)} - onTaskPromptClear={() => setTaskPromptCleared(true)} - /> - - - {/* Triggers Tab */} - - {(['pm', 'scm', 'internal'] as const).map((category) => { - const categoryTriggers = triggersByCategory[category]; - if (categoryTriggers.length === 0) return null; - - return ( -
-

- {CATEGORY_LABELS[category] ?? category} Triggers -

- onTriggerToggle(agentType, event, enabled)} - onParamChange={(event, params) => { - // Find the current trigger to get its enabled state - const currentTrigger = categoryTriggers.find((t) => t.event === event); - onTriggerParamChange(agentType, event, params, currentTrigger?.enabled ?? true); - }} - idPrefix={`${agentType}-${category}`} - /> -
- ); - })} - - {!hasTriggers && ( -

- No trigger configuration for this agent. -

- )} -
-
- - {/* Footer actions — outside tabs, applies globally */} -
-
- - - {saved && Saved} -
- {config && ( - - )} -
-
- ); -} - -/** - * Returns true when the given engine has at least one credential key configured. - * Derived from ENGINE_SECRETS in engine-secrets.ts — no separate mapping to maintain. - * If the engine is not in the map, we conservatively assume credentials are present. - */ -function engineHasCredentials(engineId: string, configuredCredentialKeys: Set): boolean { - const requiredKeys = engineCredentialKeys[engineId]; - if (!requiredKeys) return true; // Unknown engine — assume ok - return requiredKeys.some((key) => configuredCredentialKeys.has(key)); -} - -// ============================================================================ -// Agent List View -// ============================================================================ - -function countActiveTriggers( - triggers: ResolvedTrigger[], - integrations: { pm: string | null; scm: string | null }, -): number { - return triggers.filter((t) => { - if (!t.enabled) return false; - const [category] = t.event.split(':'); - if (t.providers && t.providers.length > 0) { - const activeProvider = integrations[category as keyof typeof integrations]; - return t.providers.some((p) => p === activeProvider); - } - return true; - }).length; -} - -interface AgentRowProps { - type: string; - config: AgentConfig | null; - triggers: ResolvedTrigger[]; - integrations: { pm: string | null; scm: string | null }; - onSelect: (agentType: string) => void; - onDeleteRequest: (id: number, label: string) => void; - /** Project-level model to show as "inherited" when agent has no override. */ - projectModel: string | null; - /** Project-level engine to show as "inherited" when agent has no override. */ - projectEngine: string | null; - /** System-level defaults. */ - systemDefaults: SystemDefaults | undefined; - /** Set of credential env-var keys that are configured for this project. 
*/ - configuredCredentialKeys: Set; -} - -// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: table row with multiple computed display values (model, engine, trigger count) and layered inheritance fallbacks -function AgentRow({ - type, - config, - triggers, - integrations, - onSelect, - onDeleteRequest, - projectModel, - projectEngine, - systemDefaults, - configuredCredentialKeys, -}: AgentRowProps) { - const label = (AGENT_LABELS as Record)[type] ?? type; - const activeTriggerCount = countActiveTriggers(triggers, integrations); - const modelInfo = config?.model ?? null; - const engineInfo = config?.agentEngine ?? null; - const hasCustomEngineSettings = - config?.agentEngineSettings != null && Object.keys(config.agentEngineSettings).length > 0; - - // Fallback display: show inherited model/engine when agent has no specific override - const inheritedModel = projectModel ?? systemDefaults?.model ?? null; - const inheritedEngine = projectEngine ?? systemDefaults?.agentEngine ?? null; - const displayModel = modelInfo ?? (inheritedModel ? `${inheritedModel} (inherited)` : null); - const displayEngine = engineInfo ?? (inheritedEngine ? `${inheritedEngine} (inherited)` : null); - - // Check if the agent's effective engine has credentials configured - // Only check when there is an explicit agent-level engine override - const agentEngineId = config?.agentEngine ?? null; - const hasMissingCredentials = - agentEngineId !== null && !engineHasCredentials(agentEngineId, configuredCredentialKeys); - - return ( - onSelect(type)}> - {label} - - {activeTriggerCount === 0 ? ( - - Inactive - - ) : config ? ( -
- - Configured - - {hasMissingCredentials && ( - - - - - Missing credentials - - - - This agent uses the {agentEngineId} engine but no credentials are configured for - it. Configure credentials on the Harness tab. - - - )} -
- ) : ( - - Default - - )} -
- - {displayModel || displayEngine ? ( - - {displayEngine && {displayEngine}} - {displayEngine && displayModel && · } - {displayModel && {displayModel}} - {hasCustomEngineSettings && ( - - Custom settings - - )} - - ) : ( - - )} - - - {activeTriggerCount > 0 ? ( - {activeTriggerCount} active - ) : ( - - - - - None - - - - No triggers configured — this agent won't process any events - - - )} - - -
- {config && ( - - )} - -
-
-
- ); -} - -interface AgentListViewProps { - enabledAgentTypes: string[]; - availableAgentTypes: string[]; - configByAgent: Map; - triggersByAgent: Map; - integrations: { pm: string | null; scm: string | null }; - onSelect: (agentType: string) => void; - onDelete: (id: number) => void; - onEnable: (agentType: string) => void; - isDeleting: boolean; - isEnabling: boolean; - projectModel: string | null; - projectEngine: string | null; - systemDefaults: SystemDefaults | undefined; - /** Set of credential env-var keys that are configured for this project. */ - configuredCredentialKeys: Set; -} - -function AgentListView({ - enabledAgentTypes, - availableAgentTypes, - configByAgent, - triggersByAgent, - integrations, - onSelect, - onDelete, - onEnable, - isDeleting, - isEnabling, - projectModel, - projectEngine, - systemDefaults, - configuredCredentialKeys, -}: AgentListViewProps) { - const [deleteTarget, setDeleteTarget] = useState<{ id: number; label: string } | null>(null); - - return ( - <> - {enabledAgentTypes.length === 0 ? ( -
- No agents enabled. Enable agents below to start processing. -
- ) : ( -
- - - - - Agent - Status - Engine / Model - Active Triggers - - - - - {enabledAgentTypes.map((type) => ( - setDeleteTarget({ id, label })} - projectModel={projectModel} - projectEngine={projectEngine} - systemDefaults={systemDefaults} - configuredCredentialKeys={configuredCredentialKeys} - /> - ))} - -
-
-
- )} - - {availableAgentTypes.length > 0 && ( -
-

Available Agents

-
- {availableAgentTypes.map((agentType) => { - const label = - (AGENT_LABELS as Record)[agentType] ?? agentType; - return ( -
- {label} - -
- ); - })} -
-
- )} - - !open && setDeleteTarget(null)}> - - - Delete Agent Config - - Are you sure you want to delete the config for {deleteTarget?.label}? - The agent will be disabled and no longer process any events. This action cannot be - undone. - - - - Cancel - { - if (deleteTarget) { - onDelete(deleteTarget.id); - setDeleteTarget(null); - } - }} - className="bg-destructive text-destructive-foreground hover:bg-destructive/90" - > - {isDeleting ? 'Deleting...' : 'Delete'} - - - - - - ); -} - -// ============================================================================ -// Agent Detail View -// ============================================================================ - -interface AgentDetailViewProps { - agentType: string; - projectId: string; - config: AgentConfig | null; - triggers: ResolvedTrigger[]; - integrations: { pm: string | null; scm: string | null }; - engines: Engine[]; - isSaving: boolean; - onSaveConfig: (agentType: string, configId: number | null, values: SaveConfigValues) => void; - saveSuccessNonce: number; - onDeleteConfig: (id: number) => void; - onTriggerToggle: (agentType: string, event: string, enabled: boolean) => void; - onTriggerParamChange: ( - agentType: string, - event: string, - parameters: Record, - currentEnabled: boolean, - ) => void; - onBack: () => void; - projectModel: string | null; - projectEngine: string | null; - projectMaxIterations: number | null; - systemDefaults: SystemDefaults | undefined; -} - -function AgentDetailView({ - agentType, - projectId, - config, - triggers, - integrations, - engines, - isSaving, - onSaveConfig, - saveSuccessNonce, - onDeleteConfig, - onTriggerToggle, - onTriggerParamChange, - onBack, - projectModel, - projectEngine, - projectMaxIterations, - systemDefaults, -}: AgentDetailViewProps) { - const label = (AGENT_LABELS as Record)[agentType] ?? agentType; - - return ( -
-
- -
-
-

{label}

-

- Configure model, engine, and trigger settings for the {label} agent. -

-
- { - onDeleteConfig(id); - onBack(); - }} - onTriggerToggle={onTriggerToggle} - onTriggerParamChange={onTriggerParamChange} - projectModel={projectModel} - projectEngine={projectEngine} - projectMaxIterations={projectMaxIterations} - systemDefaults={systemDefaults} - /> -
- ); -} +import { AgentDetailView } from './agent-config-detail.js'; +import { AgentListView } from './agent-config-list.js'; +import type { + AgentConfig, + Engine, + SaveConfigValues, + SystemDefaults, +} from './agent-config-types.js'; // ============================================================================ // Main Component @@ -1055,7 +190,7 @@ export function ProjectAgentConfigs({ projectId }: { projectId: string }) { // Project-level and system-level defaults for inheritance display const projectData = projectQuery.data; - const systemDefaults = defaultsQuery.data + const systemDefaults: SystemDefaults | undefined = defaultsQuery.data ? { model: defaultsQuery.data.model, maxIterations: defaultsQuery.data.maxIterations, From d643527ef9e23f576b627f455249a4e4e2aad624 Mon Sep 17 00:00:00 2001 From: aaight Date: Sat, 4 Apr 2026 17:03:17 +0200 Subject: [PATCH 18/52] fix(webhooks): relax webhook CRUD from superAdminProcedure to adminProcedure (#1086) Co-authored-by: Cascade Bot --- src/api/routers/webhooks.ts | 8 ++++---- tests/unit/api/routers/webhooks.test.ts | 22 +++++++++++++++++++--- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/src/api/routers/webhooks.ts b/src/api/routers/webhooks.ts index 95d90e76..1aa412c4 100644 --- a/src/api/routers/webhooks.ts +++ b/src/api/routers/webhooks.ts @@ -1,5 +1,5 @@ import { z } from 'zod'; -import { router, superAdminProcedure } from '../trpc.js'; +import { adminProcedure, router } from '../trpc.js'; import { applyOneTimeTokens, oneTimeTokensSchema, @@ -23,7 +23,7 @@ import type { export type { GitHubWebhook, JiraWebhookInfo, SentryWebhookInfo, TrelloWebhook }; export const webhooksRouter = router({ - list: superAdminProcedure + list: adminProcedure .input( z.object({ projectId: z.string(), @@ -65,7 +65,7 @@ export const webhooksRouter = router({ }; }), - create: superAdminProcedure + create: adminProcedure .input( z.object({ projectId: z.string(), @@ -161,7 +161,7 @@ export const webhooksRouter = 
router({ return results; }), - delete: superAdminProcedure + delete: adminProcedure .input( z.object({ projectId: z.string(), diff --git a/tests/unit/api/routers/webhooks.test.ts b/tests/unit/api/routers/webhooks.test.ts index 5c170c6b..18b27735 100644 --- a/tests/unit/api/routers/webhooks.test.ts +++ b/tests/unit/api/routers/webhooks.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it, vi } from 'vitest'; -import { createMockSuperAdmin, createMockUser } from '../../../helpers/factories.js'; +import { createMockUser } from '../../../helpers/factories.js'; import { createCallerFor, expectTRPCError, @@ -75,7 +75,7 @@ import { webhooksRouter } from '../../../../src/api/routers/webhooks.js'; const createCaller = createCallerFor(webhooksRouter); -const mockUser = createMockSuperAdmin(); +const mockUser = createMockUser(); const mockProject = { id: 'my-project', @@ -220,9 +220,25 @@ describe('webhooksRouter', () => { await expectTRPCError(caller.list({ projectId: 'my-project' }), 'UNAUTHORIZED'); }); - it('throws FORBIDDEN for admin role (not superadmin)', async () => { + it('allows admin role to list webhooks', async () => { + setupProjectContext(); + + mockFetch.mockResolvedValue({ + ok: true, + json: () => Promise.resolve([]), + }); + mockListWebhooks.mockResolvedValue({ data: [] }); + const adminUser = createMockUser({ role: 'admin' }); const caller = createCaller({ user: adminUser, effectiveOrgId: adminUser.orgId }); + const result = await caller.list({ projectId: 'my-project' }); + expect(result.trello).toEqual([]); + expect(result.github).toEqual([]); + }); + + it('throws FORBIDDEN for member role', async () => { + const memberUser = createMockUser({ role: 'member' }); + const caller = createCaller({ user: memberUser, effectiveOrgId: memberUser.orgId }); await expectTRPCError(caller.list({ projectId: 'my-project' }), 'FORBIDDEN'); }); From bbe33cd36c88a25bc57d748190d98e0bba8b8aed Mon Sep 17 00:00:00 2001 From: aaight Date: Wed, 8 Apr 2026 21:22:39 +0200 Subject: 
[PATCH 19/52] chore: remove Squint codebase intelligence support (#1091) * chore: remove Squint codebase intelligence support * chore: remove leftover SQUINT_DB_PATH entries from engine env allowlists Remove dead `SQUINT_DB_PATH` entries from the engine environment allowlists in claude-code, codex, and opencode backends (both env.ts and index.ts), and remove the misleading documentation example in docs/adding-engines.md. Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- .gitignore | 3 - CLAUDE.md | 2 +- Dockerfile.worker | 4 +- docs/adding-engines.md | 3 - docs/architecture/03-trigger-system.md | 1 - docs/architecture/04-agent-system.md | 2 +- src/agents/definitions/contextSteps.ts | 29 --- src/agents/definitions/implementation.yaml | 4 +- src/agents/definitions/planning.yaml | 4 +- src/agents/definitions/resolve-conflicts.yaml | 2 +- src/agents/definitions/respond-to-ci.yaml | 2 +- .../respond-to-planning-comment.yaml | 2 +- .../definitions/respond-to-pr-comment.yaml | 2 +- src/agents/definitions/respond-to-review.yaml | 2 +- src/agents/definitions/review.yaml | 6 +- src/agents/definitions/schema.ts | 1 - src/agents/definitions/splitting.yaml | 4 +- src/agents/definitions/strategies.ts | 2 - src/agents/prompts/index.ts | 8 - .../prompts/templates/implementation.eta | 12 +- .../templates/partials/pattern-discovery.eta | 3 +- .../templates/partials/squint-exploration.eta | 113 ----------- .../prompts/templates/partials/tmux.eta | 2 +- src/agents/prompts/templates/planning.eta | 10 +- .../prompts/templates/resolve-conflicts.eta | 2 - .../prompts/templates/respond-to-ci.eta | 2 - .../templates/respond-to-planning-comment.eta | 9 +- .../templates/respond-to-pr-comment.eta | 2 - .../prompts/templates/respond-to-review.eta | 2 - src/agents/prompts/templates/review.eta | 52 +---- src/agents/prompts/templates/splitting.eta | 10 +- src/agents/shared/builderFactory.ts | 5 - src/agents/shared/executionPipeline.ts | 14 +- 
src/agents/shared/promptContext.ts | 4 - src/backends/adapter.ts | 2 - src/backends/claude-code/env.ts | 3 - src/backends/codex/env.ts | 3 - src/backends/codex/index.ts | 2 - src/backends/opencode/env.ts | 3 - src/backends/opencode/index.ts | 2 - src/backends/secretOrchestrator.ts | 2 - src/config/schema.ts | 1 - .../migrations/0048_remove_squint_db_url.sql | 1 + src/db/migrations/meta/_journal.json | 7 + src/db/repositories/configMapper.ts | 3 - src/db/schema/projects.ts | 1 - src/gadgets/tmux/TmuxGadget.ts | 25 --- src/utils/squintDb.ts | 98 --------- tests/unit/agents/definitions/loader.test.ts | 5 +- .../agents/definitions/strategies.test.ts | 6 - tests/unit/agents/prompts.test.ts | 142 ------------- .../unit/agents/shared/builderFactory.test.ts | 29 +-- .../agents/shared/executionPipeline.test.ts | 7 - .../unit/agents/shared/promptContext.test.ts | 68 ------- tests/unit/backends/agent-profiles.test.ts | 132 +------------ .../unit/db/repositories/configMapper.test.ts | 8 - tests/unit/repo-hygiene.test.ts | 11 -- tests/unit/utils/squintDb.test.ts | 187 ------------------ 58 files changed, 55 insertions(+), 1018 deletions(-) delete mode 100644 src/agents/prompts/templates/partials/squint-exploration.eta create mode 100644 src/db/migrations/0048_remove_squint_db_url.sql delete mode 100644 src/utils/squintDb.ts delete mode 100644 tests/unit/utils/squintDb.test.ts diff --git a/.gitignore b/.gitignore index caf9324a..2d3a7e55 100644 --- a/.gitignore +++ b/.gitignore @@ -34,9 +34,6 @@ npm-debug.log* tmp-*.ts tmp-*.sh -# Generated databases -.squint.db - # Local agent workspace (repos, logs) workspace/ diff --git a/CLAUDE.md b/CLAUDE.md index d288d819..b4c68f24 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -217,7 +217,7 @@ CASCADE stores all project configuration in PostgreSQL. 
The `config/projects.jso ### Schema - `organizations` - Organization definitions (multi-tenant support) -- `projects` - Per-project config (repo, base branch, budget, engine, and per-project overrides for model, iterations, timeouts, progress model/interval, `squint_db_url`, `run_links_enabled`, `max_in_flight_items`) +- `projects` - Per-project config (repo, base branch, budget, engine, and per-project overrides for model, iterations, timeouts, progress model/interval, `run_links_enabled`, `max_in_flight_items`) - `project_integrations` - Integration configs per project with `category` (pm/scm), `provider` (trello/jira/github), `config` JSONB, and `triggers` JSONB. One PM + one SCM per project (enforced by unique constraint) - `project_credentials` - Project-scoped credentials keyed by `(projectId, envVarKey)`. Stores all credential types (GitHub tokens, Trello keys, JIRA tokens, LLM API keys). Encrypted at rest when `CREDENTIAL_MASTER_KEY` is set - `agent_configs` - Per-agent-type overrides (model, iterations, engine, `agent_engine_settings`, max_concurrency, `system_prompt`, `task_prompt`), project-scoped only (`project_id NOT NULL`) diff --git a/Dockerfile.worker b/Dockerfile.worker index 4c2484d6..c4c44e69 100644 --- a/Dockerfile.worker +++ b/Dockerfile.worker @@ -15,8 +15,8 @@ RUN npm run build FROM node:22-bookworm AS production WORKDIR /app -# Install pnpm and squint globally (some repos use pnpm, squint for codebase analysis) -RUN npm install -g pnpm @zbigniewsobiecki/squint@^1.10.2 --force +# Install pnpm globally (some repos use pnpm) +RUN npm install -g pnpm --force # Install system packages needed by agent runtime RUN apt-get update && apt-get install -y \ diff --git a/docs/adding-engines.md b/docs/adding-engines.md index 170cfeca..f7a872f6 100644 --- a/docs/adding-engines.md +++ b/docs/adding-engines.md @@ -113,9 +113,6 @@ export const ALLOWED_ENV_EXACT = new Set([ // My Engine auth 'MY_ENGINE_API_KEY', - - // Squint (pass through so agents can use 
AST tooling) - 'SQUINT_DB_PATH', ]); ``` diff --git a/docs/architecture/03-trigger-system.md b/docs/architecture/03-trigger-system.md index 1c44b24c..4674433a 100644 --- a/docs/architecture/03-trigger-system.md +++ b/docs/architecture/03-trigger-system.md @@ -143,7 +143,6 @@ Each trigger in a YAML agent definition can declare a `contextPipeline` — an o |------|---------| | `directoryListing` | List repository file structure | | `contextFiles` | Read key project files (README, etc.) | -| `squint` | Query Squint semantic index | | `workItem` | Fetch work item details from PM tool | | `prepopulateTodos` | Pre-populate todo list from work item checklists | | `prContext` | Fetch PR details, diff, reviews | diff --git a/docs/architecture/04-agent-system.md b/docs/architecture/04-agent-system.md index e385c82e..df4a7d0e 100644 --- a/docs/architecture/04-agent-system.md +++ b/docs/architecture/04-agent-system.md @@ -42,7 +42,7 @@ triggers: type: select options: [todo] defaultValue: todo - contextPipeline: [directoryListing, contextFiles, squint, workItem, prepopulateTodos] + contextPipeline: [directoryListing, contextFiles, workItem, prepopulateTodos] prompts: taskPrompt: | diff --git a/src/agents/definitions/contextSteps.ts b/src/agents/definitions/contextSteps.ts index f60ce477..9c59e6b0 100644 --- a/src/agents/definitions/contextSteps.ts +++ b/src/agents/definitions/contextSteps.ts @@ -5,7 +5,6 @@ * These are the building blocks composed by the YAML contextPipeline arrays. 
*/ -import { execFileSync } from 'node:child_process'; import { formatCheckStatus } from '../../gadgets/github/core/getPRChecks.js'; import { ListDirectory } from '../../gadgets/ListDirectory.js'; import { readWorkItem, readWorkItemWithMedia } from '../../gadgets/pm/core/readWorkItem.js'; @@ -23,7 +22,6 @@ import { getPMProviderOrNull, MAX_IMAGES_PER_WORK_ITEM } from '../../pm/index.js import { getSentryClient } from '../../sentry/client.js'; import type { AgentInput, ProjectConfig } from '../../types/index.js'; import { parseRepoFullName } from '../../utils/repo.js'; -import { resolveSquintDbPath } from '../../utils/squintDb.js'; import type { ContextInjection, LogWriter } from '../contracts/index.js'; import { formatPRComments, @@ -81,33 +79,6 @@ export function fetchContextFilesStep(params: FetchContextParams): ContextInject })); } -export function fetchSquintStep(params: FetchContextParams): ContextInjection[] { - const squintDb = resolveSquintDbPath(params.repoDir); - if (!squintDb) return []; - - try { - const output = execFileSync('squint', ['overview', '-d', squintDb], { - encoding: 'utf-8', - timeout: 30_000, - }); - if (!output?.trim()) return []; - - return [ - { - toolName: 'SquintOverview', - params: { - comment: 'Pre-fetching Squint codebase overview for context', - database: squintDb, - }, - result: output, - description: 'Pre-fetched Squint codebase overview', - }, - ]; - } catch { - return []; - } -} - export async function fetchWorkItemStep(params: FetchContextParams): Promise { if (!params.input.workItemId) return []; try { diff --git a/src/agents/definitions/implementation.yaml b/src/agents/definitions/implementation.yaml index f125c472..5b4a6a17 100644 --- a/src/agents/definitions/implementation.yaml +++ b/src/agents/definitions/implementation.yaml @@ -35,7 +35,7 @@ triggers: label: Target Status options: [todo] defaultValue: todo - contextPipeline: [directoryListing, contextFiles, squint, workItem, prepopulateTodos] + contextPipeline: 
[directoryListing, contextFiles, workItem, prepopulateTodos] - event: pm:label-added label: Ready to Process Label description: Trigger when Ready to Process label added to a card in the Todo list @@ -46,7 +46,7 @@ triggers: label: Target List options: [todo] defaultValue: todo - contextPipeline: [directoryListing, contextFiles, squint, workItem, prepopulateTodos] + contextPipeline: [directoryListing, contextFiles, workItem, prepopulateTodos] strategies: {} diff --git a/src/agents/definitions/planning.yaml b/src/agents/definitions/planning.yaml index 0c878ac5..ad7c1d64 100644 --- a/src/agents/definitions/planning.yaml +++ b/src/agents/definitions/planning.yaml @@ -32,7 +32,7 @@ triggers: label: Target Status options: [planning] defaultValue: planning - contextPipeline: [directoryListing, contextFiles, squint, workItem] + contextPipeline: [directoryListing, contextFiles, workItem] - event: pm:label-added label: Ready to Process Label description: Trigger when Ready to Process label added to a card in Planning list @@ -43,7 +43,7 @@ triggers: label: Target List options: [planning] defaultValue: planning - contextPipeline: [directoryListing, contextFiles, squint, workItem] + contextPipeline: [directoryListing, contextFiles, workItem] strategies: {} hooks: diff --git a/src/agents/definitions/resolve-conflicts.yaml b/src/agents/definitions/resolve-conflicts.yaml index c7a7591f..77fcbc7d 100644 --- a/src/agents/definitions/resolve-conflicts.yaml +++ b/src/agents/definitions/resolve-conflicts.yaml @@ -30,7 +30,7 @@ triggers: description: Trigger when a PR has merge conflicts with the base branch defaultEnabled: false providers: [github] - contextPipeline: [prContext, directoryListing, contextFiles, squint, workItem] + contextPipeline: [prContext, directoryListing, contextFiles, workItem] strategies: {} diff --git a/src/agents/definitions/respond-to-ci.yaml b/src/agents/definitions/respond-to-ci.yaml index 23d64ae9..680ed114 100644 --- 
a/src/agents/definitions/respond-to-ci.yaml +++ b/src/agents/definitions/respond-to-ci.yaml @@ -31,7 +31,7 @@ triggers: description: Trigger when CI checks fail defaultEnabled: false providers: [github] - contextPipeline: [prContext, directoryListing, contextFiles, squint, workItem] + contextPipeline: [prContext, directoryListing, contextFiles, workItem] strategies: {} diff --git a/src/agents/definitions/respond-to-planning-comment.yaml b/src/agents/definitions/respond-to-planning-comment.yaml index b2f1db7a..6ee3989b 100644 --- a/src/agents/definitions/respond-to-planning-comment.yaml +++ b/src/agents/definitions/respond-to-planning-comment.yaml @@ -27,7 +27,7 @@ triggers: label: Comment @mention description: Trigger when bot is @mentioned in a card/issue comment defaultEnabled: false - contextPipeline: [directoryListing, contextFiles, squint, workItem] + contextPipeline: [directoryListing, contextFiles, workItem] strategies: {} diff --git a/src/agents/definitions/respond-to-pr-comment.yaml b/src/agents/definitions/respond-to-pr-comment.yaml index b962b80c..da1ac847 100644 --- a/src/agents/definitions/respond-to-pr-comment.yaml +++ b/src/agents/definitions/respond-to-pr-comment.yaml @@ -29,7 +29,7 @@ triggers: description: Trigger when the implementer bot is @mentioned in a PR comment defaultEnabled: false providers: [github] - contextPipeline: [prContext, prConversation, directoryListing, contextFiles, squint] + contextPipeline: [prContext, prConversation, directoryListing, contextFiles] strategies: gadgetOptions: diff --git a/src/agents/definitions/respond-to-review.yaml b/src/agents/definitions/respond-to-review.yaml index 5a768c83..2a3a6059 100644 --- a/src/agents/definitions/respond-to-review.yaml +++ b/src/agents/definitions/respond-to-review.yaml @@ -30,7 +30,7 @@ triggers: description: Trigger when a review with changes requested or comments is submitted defaultEnabled: false providers: [github] - contextPipeline: [prContext, prConversation, 
directoryListing, contextFiles, squint] + contextPipeline: [prContext, prConversation, directoryListing, contextFiles] strategies: gadgetOptions: diff --git a/src/agents/definitions/review.yaml b/src/agents/definitions/review.yaml index cda573ba..e1135f63 100644 --- a/src/agents/definitions/review.yaml +++ b/src/agents/definitions/review.yaml @@ -36,13 +36,13 @@ triggers: description: Filter PRs by author type options: [own, external, all] defaultValue: own - contextPipeline: [prContext, contextFiles, squint] + contextPipeline: [prContext, contextFiles] - event: scm:review-requested label: On Review Requested description: Trigger review when a CASCADE persona is explicitly requested as reviewer defaultEnabled: false providers: [github] - contextPipeline: [prContext, contextFiles, squint] + contextPipeline: [prContext, contextFiles] - event: scm:pr-opened label: PR Opened description: Trigger review when a new PR is opened (without waiting for CI) @@ -55,7 +55,7 @@ triggers: description: Filter PRs by author type options: [own, external, all] defaultValue: own - contextPipeline: [prContext, contextFiles, squint] + contextPipeline: [prContext, contextFiles] strategies: {} prompts: diff --git a/src/agents/definitions/schema.ts b/src/agents/definitions/schema.ts index 722c748e..8124bc20 100644 --- a/src/agents/definitions/schema.ts +++ b/src/agents/definitions/schema.ts @@ -75,7 +75,6 @@ export const TriggerParameterSchema = z export const CONTEXT_STEP_NAMES = [ 'directoryListing', 'contextFiles', - 'squint', 'workItem', 'prepopulateTodos', 'prContext', diff --git a/src/agents/definitions/splitting.yaml b/src/agents/definitions/splitting.yaml index e8dfa87e..31ce82a2 100644 --- a/src/agents/definitions/splitting.yaml +++ b/src/agents/definitions/splitting.yaml @@ -33,7 +33,7 @@ triggers: label: Target Status options: [splitting] defaultValue: splitting - contextPipeline: [directoryListing, contextFiles, squint, workItem] + contextPipeline: [directoryListing, 
contextFiles, workItem] - event: pm:label-added label: Ready to Process Label description: Trigger when Ready to Process label added to a card in Splitting list @@ -44,7 +44,7 @@ triggers: label: Target List options: [splitting] defaultValue: splitting - contextPipeline: [directoryListing, contextFiles, squint, workItem] + contextPipeline: [directoryListing, contextFiles, workItem] strategies: {} diff --git a/src/agents/definitions/strategies.ts b/src/agents/definitions/strategies.ts index dc4d4304..23fd5f2e 100644 --- a/src/agents/definitions/strategies.ts +++ b/src/agents/definitions/strategies.ts @@ -17,7 +17,6 @@ import { fetchPipelineSnapshotStep, fetchPRContextStep, fetchPRConversationStep, - fetchSquintStep, fetchWorkItemStep, prepopulateTodosStep, } from './contextSteps.js'; @@ -32,7 +31,6 @@ export const CONTEXT_STEP_REGISTRY: Record< > = { directoryListing: fetchDirectoryListingStep, contextFiles: fetchContextFilesStep, - squint: fetchSquintStep, workItem: fetchWorkItemStep, prepopulateTodos: prepopulateTodosStep, prContext: fetchPRContextStep, diff --git a/src/agents/prompts/index.ts b/src/agents/prompts/index.ts index 48a5d75d..4f7b84b0 100644 --- a/src/agents/prompts/index.ts +++ b/src/agents/prompts/index.ts @@ -69,9 +69,6 @@ export interface PromptContext { // Capacity / pipeline management maxInFlightItems?: number; - // Squint codebase intelligence - squintEnabled?: boolean; - // Future extensibility [key: string]: unknown; } @@ -342,11 +339,6 @@ export function getTemplateVariables(): Array<{ group: 'Capacity', description: 'Maximum number of items allowed in the active pipeline at once (default: 1)', }, - { - name: 'squintEnabled', - group: 'Squint', - description: 'Whether the repository has a Squint database (.squint.db) available', - }, ]; } diff --git a/src/agents/prompts/templates/implementation.eta b/src/agents/prompts/templates/implementation.eta index 0e581f75..a3b59160 100644 --- a/src/agents/prompts/templates/implementation.eta +++ 
b/src/agents/prompts/templates/implementation.eta @@ -6,11 +6,8 @@ You are an expert software engineer implementing features and fixing issues base ### Phase 1: Understand 1. **Review the pre-loaded work item data** and verify your pre-populated todo list matches the implementation plan -<% if (it.squintEnabled) { %>2. **Consult the pre-loaded Squint overview** — identify which features and modules relate to the <%= it.workItemNoun || 'card' %> -3. **Use the Squint Codebase Intelligence Protocol (below)** to explore features, flows, and modules before reading source files -4. **Read codebase guidelines** - CLAUDE.md, AGENTS.md (these are meta-docs about conventions) -5. **THEN read source files** — only the files Squint identified as relevant<% } else { %>2. **Read codebase guidelines** - CLAUDE.md, AGENTS.md (these are meta-docs about conventions) -3. **Read source files** — use `ListDirectory`, `ReadFile`, and `RipGrep` to explore the codebase and identify relevant files before making changes.<% } %> +2. **Read codebase guidelines** - CLAUDE.md, AGENTS.md (these are meta-docs about conventions) +3. **Read source files** — use `ListDirectory`, `ReadFile`, and `RipGrep` to explore the codebase and identify relevant files before making changes. ### Phase 2: Prepare @@ -20,8 +17,7 @@ You are an expert software engineer implementing features and fixing issues base ### Phase 3: Implement 6. **For each file to modify:** -<% if (it.squintEnabled) { %> - Use `squint symbols show --file --json` via Tmux first to understand the file's architectural role -<% } %> - Read 1-2 similar files for patterns and conventions + - Read 1-2 similar files for patterns and conventions - Make changes - Verify no diagnostics errors before moving to next file - Write tests leveraging patterns and helpers you found @@ -36,8 +32,6 @@ You are an expert software engineer implementing features and fixing issues base 9. 
**Mark acceptance criteria complete** using UpdateChecklistItem for each criterion you've implemented 10. **Post summary comment** on the <%= it.workItemNoun || 'card' %> describing what was implemented and linking to the PR -<%~ include("partials/squint-exploration") %> - <%~ include("partials/task-tracking") %> <%~ include("partials/pattern-discovery") %> diff --git a/src/agents/prompts/templates/partials/pattern-discovery.eta b/src/agents/prompts/templates/partials/pattern-discovery.eta index d29e6879..b04aed5d 100644 --- a/src/agents/prompts/templates/partials/pattern-discovery.eta +++ b/src/agents/prompts/templates/partials/pattern-discovery.eta @@ -1,8 +1,7 @@ ### Pattern Discovery When implementing something new: -1. Find one similar file (component, API endpoint, test)<% if (it.squintEnabled) { %> - - Use `squint symbols show --file --json` to understand a similar file's full structure before reading its source<% } %> +1. Find one similar file (component, API endpoint, test) 2. Note the imports, structure, and conventions 3. Apply the same patterns to your implementation diff --git a/src/agents/prompts/templates/partials/squint-exploration.eta b/src/agents/prompts/templates/partials/squint-exploration.eta deleted file mode 100644 index d4d382f4..00000000 --- a/src/agents/prompts/templates/partials/squint-exploration.eta +++ /dev/null @@ -1,113 +0,0 @@ -<% if (it.squintEnabled) { %> -### Squint Codebase Intelligence Protocol (MANDATORY) - -**DO NOT read source files until you have completed these drill-down steps.** - -You have access to Squint, an AST-powered codebase analysis tool. A SquintOverview has been pre-loaded into your context showing the module tree and feature list. Use the commands below to drill deeper before opening any files. - -**All squint commands accept `--json` for structured output. Always use `--json`.** - -#### Step 1: Identify relevant features and flows - -From the pre-loaded overview, identify which features relate to your task. 
For each: - -``` -squint features show --json -``` - -Look at: `flows` (user journeys crossing module boundaries), `modulesInvolved`, `interactions` (module-to-module connections). - -#### Step 2: Trace data flows across boundaries - -For each relevant flow: - -``` -squint flows show --json -``` - -Look at: `steps` (ordered interaction chain showing how data moves between modules), `definitionSteps` (function-to-function call trace with exact file paths and line numbers). This is where cross-cutting contracts become visible (e.g., API response shapes that frontend must consume). - -To trace the call graph from a specific function: - -``` -squint flows trace --name --depth 5 --json -``` - -#### Step 3: Understand module boundaries - -For each module you'll be working in: - -``` -squint modules show --json -``` - -Look at: `outgoingInteractions` (modules this one calls), `incomingInteractions` (modules that call this one), `members` (symbols with file paths), `children` (sub-modules). - -#### Step 4: Examine files before reading source code - -Before opening any file, get its structural overview: - -``` -squint files show --json -``` - -Shows: all definitions (with module + metadata), imports, imported-by relationships, and cross-file references. Use this to understand a file's role before reading its source. - -For targeted file-level symbol aggregation (all symbols, relationships, interactions, flows for one file): - -``` -squint symbols show --file --json -``` - -#### Step 5: Deep-dive on specific symbols - -When you need to understand a specific function, class, or type: - -``` -squint symbols show --json -``` - -Returns: `sourceCode` (actual source), `callSites` (where it's called from, with context lines), `relationships` (outgoing references), `incomingRelationships` (what depends on this), `flows` (which flows involve it), `module`. 
- -If the name is ambiguous, disambiguate with: `squint symbols show --file --json` - -#### Step 6: Advanced queries (use when needed) - -**Call hierarchy** — find all callers/callees of a function: -``` -squint hierarchy --type calls --root --depth 3 --json -``` - -**Relationships** — find connections between symbols: -``` -squint relationships list --from --json -``` - -**Domain groupings** — understand domain boundaries: -``` -squint domains show --json -``` - -**Filter symbols** — find symbols by kind, domain, or purity: -``` -squint symbols list --kind function --domain --json -``` - -#### When to read files instead of using Squint - -- Reading full source code (after squint told you which files matter) -- Searching for specific string patterns or regex -- Config files, tests, or non-code files that squint doesn't index -- Any file squint identified as relevant — squint shows structure, files show content -<% } else { %> -### Codebase Exploration Protocol - -The repository has no Squint database. Skip squint steps and proceed directly to reading files. - -Use these tools to explore the codebase: -- `ListDirectory` — list directory contents to understand structure -- `ReadFile` — read file contents for implementation details -- `RipGrep` — search for patterns in code (regex, respects gitignore) -- `AstGrep` — AST-aware code search (use $VAR for captures) -- `Tmux` — run shell commands (for exploration: grep, find, etc.) -<% } %> diff --git a/src/agents/prompts/templates/partials/tmux.eta b/src/agents/prompts/templates/partials/tmux.eta index 722e95ff..a2286c1f 100644 --- a/src/agents/prompts/templates/partials/tmux.eta +++ b/src/agents/prompts/templates/partials/tmux.eta @@ -21,4 +21,4 @@ Use the Tmux gadget for ALL shell commands: - With pipes: `command="npm test 2>&1 | head -50"` - With globs: `command="find . 
-name '*.ts' | xargs wc -l"` -Use unique session names like: "npm-install", "test-run", "lint-check", "build", "typecheck"<% if (it.squintEnabled) { %>, "squint-modules", "squint-features"<% } %> +Use unique session names like: "npm-install", "test-run", "lint-check", "build", "typecheck" diff --git a/src/agents/prompts/templates/planning.eta b/src/agents/prompts/templates/planning.eta index 1e5db1ce..87e9926b 100644 --- a/src/agents/prompts/templates/planning.eta +++ b/src/agents/prompts/templates/planning.eta @@ -17,15 +17,9 @@ CRITICAL: You are running in a cloned copy of the project repository. Before creating your plan: 1. **Read the <%= it.workItemNoun || 'card' %>** to identify scope signals (file names, components, features, domain terms) -<% if (it.squintEnabled) { %>2. **Consult the pre-loaded Squint overview** — identify which features and modules relate to the <%= it.workItemNoun || 'card' %> -3. **Use the Squint Codebase Intelligence Protocol (below)** to explore features, flows, and modules before reading any files -4. **THEN read specific files** — only files Squint identified as relevant -5. **Understand existing patterns** — how does the codebase already solve similar problems? -6. **Map terminology** — <%= it.workItemNoun || 'card' %> may use different terms than code<% } else { %>2. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to identify relevant files and patterns. +2. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to identify relevant files and patterns. 3. **Understand existing patterns** — how does the codebase already solve similar problems? -4. **Map terminology** — <%= it.workItemNoun || 'card' %> may use different terms than code<% } %> - -<%~ include("partials/squint-exploration") %> +4. 
**Map terminology** — <%= it.workItemNoun || 'card' %> may use different terms than code ## Codebase Pattern Analysis diff --git a/src/agents/prompts/templates/resolve-conflicts.eta b/src/agents/prompts/templates/resolve-conflicts.eta index 84b2600e..c828a3f3 100644 --- a/src/agents/prompts/templates/resolve-conflicts.eta +++ b/src/agents/prompts/templates/resolve-conflicts.eta @@ -73,8 +73,6 @@ Always commit and push before calling Finish! <%~ include("partials/task-tracking") %> -<%~ include("partials/squint-exploration") %> - <%~ include("partials/pattern-discovery") %> <%~ include("partials/environment") %> diff --git a/src/agents/prompts/templates/respond-to-ci.eta b/src/agents/prompts/templates/respond-to-ci.eta index e8f32894..6c5ab4d2 100644 --- a/src/agents/prompts/templates/respond-to-ci.eta +++ b/src/agents/prompts/templates/respond-to-ci.eta @@ -43,8 +43,6 @@ You are an expert software engineer fixing CI check failures on a pull request. <%~ include("partials/task-tracking") %> -<%~ include("partials/squint-exploration") %> - <%~ include("partials/pattern-discovery") %> <%~ include("partials/environment") %> diff --git a/src/agents/prompts/templates/respond-to-planning-comment.eta b/src/agents/prompts/templates/respond-to-planning-comment.eta index d47ba88f..a114119e 100644 --- a/src/agents/prompts/templates/respond-to-planning-comment.eta +++ b/src/agents/prompts/templates/respond-to-planning-comment.eta @@ -18,13 +18,8 @@ You are running in a cloned copy of the project repository. Before updating the 1. **Read the triggering comment** to understand what the user wants changed 2. **Read the current <%= it.workItemNoun || 'card' %>** to understand the existing plan -<% if (it.squintEnabled) { %>3. **Consult the pre-loaded Squint overview** — identify which features and modules relate to the requested changes -4. **Use the Squint Codebase Intelligence Protocol (below)** to explore features, flows, and modules before reading any files -5. 
**THEN read specific files** — only files Squint identified as relevant -6. **Understand existing patterns** — how does the codebase already solve similar problems?<% } else { %>3. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to understand the relevant areas. -4. **Understand existing patterns** — how does the codebase already solve similar problems?<% } %> - -<%~ include("partials/squint-exploration") %> +3. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to understand the relevant areas. +4. **Understand existing patterns** — how does the codebase already solve similar problems? ## Comment Classification diff --git a/src/agents/prompts/templates/respond-to-pr-comment.eta b/src/agents/prompts/templates/respond-to-pr-comment.eta index 0912ab92..44472d08 100644 --- a/src/agents/prompts/templates/respond-to-pr-comment.eta +++ b/src/agents/prompts/templates/respond-to-pr-comment.eta @@ -38,8 +38,6 @@ A user @mentioned you in a PR comment. 
Read their request and do exactly what th <%~ include("partials/task-tracking") %> -<%~ include("partials/squint-exploration") %> - <%~ include("partials/pattern-discovery") %> <%~ include("partials/environment") %> diff --git a/src/agents/prompts/templates/respond-to-review.eta b/src/agents/prompts/templates/respond-to-review.eta index 3753b2ba..39f78a50 100644 --- a/src/agents/prompts/templates/respond-to-review.eta +++ b/src/agents/prompts/templates/respond-to-review.eta @@ -35,8 +35,6 @@ You are an expert software engineer addressing code review feedback on a pull re <%~ include("partials/task-tracking") %> -<%~ include("partials/squint-exploration") %> - <%~ include("partials/pattern-discovery") %> <%~ include("partials/environment") %> diff --git a/src/agents/prompts/templates/review.eta b/src/agents/prompts/templates/review.eta index 8795dd32..aca1a65e 100644 --- a/src/agents/prompts/templates/review.eta +++ b/src/agents/prompts/templates/review.eta @@ -13,7 +13,7 @@ CRITICAL: **Accuracy over thoroughness**: A review with zero comments that correctly approves good code is better than a review that invents problems. Only report issues you can demonstrate. -**Architecture-aware**: Code can be correct yet harmful. A well-tested function in the wrong module, a clean implementation that duplicates an existing pattern, a performant solution that creates tight coupling — these are real problems even though every line compiles and every test passes.<% if (it.squintEnabled) { %> Use squint to see the forest, not just the trees.<% } %> +**Architecture-aware**: Code can be correct yet harmful. A well-tested function in the wrong module, a clean implementation that duplicates an existing pattern, a performant solution that creates tight coupling — these are real problems even though every line compiles and every test passes. ## Process @@ -21,38 +21,7 @@ CRITICAL: 1. **Understand the change**: Read the PR description and all modified files. 
Understand WHAT changed and WHY. An initial comment has already been posted on the PR acknowledging the review is in progress. -<% if (it.squintEnabled) { %>2. **Use the Squint Codebase Intelligence Protocol (below)** to understand the codebase structure before reading files. Focus on modules touched by the PR to verify changes fit established patterns. - -<%~ include("partials/squint-exploration") %> - -#### Squint for Conflict Detection - -Beyond general exploration, use squint specifically to detect conflicts and violations: - -- **Dependency direction audit**: For each new import, run `squint modules show` on both the importing and imported modules. Verify the dependency is consistent with existing arrows — a new arrow pointing "upstream" is a red flag. -- **Duplicate symbol detection**: For each significant new export (function, class, constant), run `squint symbols show ` to check if overlapping functionality already exists elsewhere. -- **Feature flow impact**: For affected features, run `squint features show ` and verify the PR changes are consistent with the existing data flow direction. A change that sends data backward through a flow deserves scrutiny. - -3. **Analyze architectural impact**: Before reading implementation details, answer these strategic questions using squint data: - - **Boundary & Responsibility:** - - Do changes respect module boundaries shown by squint? - - Is logic in the right layer? (business logic in controllers = bad) - - Do new dependencies point in the correct direction? Use `squint modules show` to verify. - - **Pattern Consistency:** - - How does the codebase already solve this kind of problem? Use `squint features show` to find analogous flows. - - Does the PR introduce a "second way" to do something already done one way? - - Use `squint symbols show` on new functions/classes to find existing analogues. - - **Design Simplicity:** - - Could the same goal be achieved with fewer files/abstractions/layers? 
- - Does the PR introduce indirection not justified by current complexity? - - **Feature/UX Impact:** - - Trace affected feature flows with `squint features show`. Does the change create inconsistent behavior? - - What does the user see when this code fails? Edge cases in UX? -<% } else { %>2. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to understand the context of the changes. Read CLAUDE.md and README.md for project conventions. +2. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to understand the context of the changes. Read CLAUDE.md and README.md for project conventions. 3. **Analyze architectural impact**: Before reading implementation details, answer these strategic questions: @@ -71,7 +40,6 @@ Beyond general exploration, use squint specifically to detect conflicts and viol **Feature/UX Impact:** - Does the change create inconsistent behavior? - What does the user see when this code fails? Edge cases in UX? -<% } %> ### Phase 2: Tactical Verification @@ -100,7 +68,7 @@ Issues that **must** be fixed before merge: - Correctness bugs (with failing scenario) - Data loss or corruption risks - Breaking changes to public APIs -- Dependency cycle introduction<% if (it.squintEnabled) { %> (confirmed via squint)<% } %> +- Dependency cycle introduction - Responsibility violation (business logic where it can't be tested/reused) ### SHOULD_FIX (use REQUEST_CHANGES or COMMENT) @@ -110,7 +78,7 @@ Real issues worth addressing: - Incomplete implementations that will cause problems - Test coverage gaps for critical paths - Pattern conflict (second way to do something already done consistently one way) -- Abstraction misfit (new abstraction misaligned with module boundaries<% if (it.squintEnabled) { %> per squint<% } %>) +- Abstraction misfit (new abstraction misaligned with module boundaries) - Unnecessary complexity (indirection not justified by current requirements) ### NITPICK (skip or brief COMMENT) 
@@ -169,13 +137,13 @@ Severity guide: Answer these during Phase 1 — they catch design problems that line-by-line review misses: -1. **Does this change belong here?** — Is the code in the right module?<% if (it.squintEnabled) { %> Does it align with the feature boundaries squint shows?<% } %> Would a developer looking for this functionality find it where it lives? +1. **Does this change belong here?** — Is the code in the right module? Would a developer looking for this functionality find it where it lives? -2. **Does this conflict with existing patterns?** —<% if (it.squintEnabled) { %> Use `squint symbols show` and `squint features show` to find analogous code.<% } %> Does the PR follow the same conventions? Is it introducing a second way to do something the codebase already does one way? A single instance is not a "pattern" — look for consistent repetition before flagging a conflict. +2. **Does this conflict with existing patterns?** — Does the PR follow the same conventions? Is it introducing a second way to do something the codebase already does one way? A single instance is not a "pattern" — look for consistent repetition before flagging a conflict. 3. **Is this the simplest solution?** — Could the same goal be achieved by extending existing code rather than creating new abstractions? Does every new file, class, or layer of indirection earn its keep against current (not hypothetical) complexity? -4. **What does the user experience?** — <% if (it.squintEnabled) { %>Trace user-facing flows affected by this change via `squint features show`. <% } %>What does the user see when this code succeeds? When it fails? Are error states handled in a way the user can act on? +4. **What does the user experience?** — What does the user see when this code succeeds? When it fails? Are error states handled in a way the user can act on? 
## Anti-Patterns to Avoid @@ -208,7 +176,7 @@ Answer these during Phase 1 — they catch design problems that line-by-line rev - A 5-line bug fix doesn't need an architectural essay. ### Don't flag pattern conflicts without evidence -- <% if (it.squintEnabled) { %>Cite the existing pattern via squint — show the analogous code that does it differently.<% } else { %>Cite the existing pattern — show the analogous code that does it differently.<% } %> +- Cite the existing pattern — show the analogous code that does it differently. - A single instance is not a "pattern." Look for at least 2-3 consistent examples before claiming a conflict. ## Review Decision @@ -238,8 +206,8 @@ Use CreatePRReview with: ## Architecture & Design [Only if strategic issues found — skip entirely for clean PRs] -- **[SHOULD_FIX] Pattern conflict**: ...with evidence<% if (it.squintEnabled) { %> (squint: `squint symbols show `)<% } %> -- **[BLOCKING] Dependency violation**: ...with evidence<% if (it.squintEnabled) { %> (squint: `squint modules show `)<% } %> +- **[SHOULD_FIX] Pattern conflict**: ...with evidence +- **[BLOCKING] Dependency violation**: ...with evidence ## Code Issues [Only if tactical issues found — skip entirely for clean PRs] diff --git a/src/agents/prompts/templates/splitting.eta b/src/agents/prompts/templates/splitting.eta index c0620fa4..3614d0a6 100644 --- a/src/agents/prompts/templates/splitting.eta +++ b/src/agents/prompts/templates/splitting.eta @@ -76,15 +76,9 @@ For technical/infrastructure work, adapt the format: You are running in a cloned copy of the project repository. Before creating stories: 1. **Read the <%= it.workItemNoun || 'card' %>** to identify scope signals (file names, components, features, domain terms) -<% if (it.squintEnabled) { %>2. **Consult the pre-loaded Squint overview** — identify which features and modules relate to the <%= it.workItemNoun || 'card' %> -3. 
**Use the Squint Codebase Intelligence Protocol (below)** to explore features, flows, and modules before reading any files -4. **THEN read specific files** — only files Squint identified as relevant -5. **Understand existing patterns** — how does the codebase already solve similar problems? -6. **Map terminology** — <%= it.workItemNoun || 'card' %> may use different terms than the code<% } else { %>2. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to identify relevant files and patterns. +2. **Explore the codebase** using `ListDirectory`, `ReadFile`, `RipGrep`, and `Tmux` to identify relevant files and patterns. 3. **Understand existing patterns** — how does the codebase already solve similar problems? -4. **Map terminology** — <%= it.workItemNoun || 'card' %> may use different terms than the code<% } %> - -<%~ include("partials/squint-exploration") %> +4. **Map terminology** — <%= it.workItemNoun || 'card' %> may use different terms than the code ## Context Variables diff --git a/src/agents/shared/builderFactory.ts b/src/agents/shared/builderFactory.ts index ddfb88a7..c516bdbf 100644 --- a/src/agents/shared/builderFactory.ts +++ b/src/agents/shared/builderFactory.ts @@ -12,7 +12,6 @@ import { getRateLimitForModel } from '../../config/rateLimits.js'; import { getRetryConfig } from '../../config/retryConfig.js'; import { initSessionState, type SessionHooks, setReadOnlyFs } from '../../gadgets/sessionState.js'; import type { LLMCallLogger } from '../../utils/llmLogging.js'; -import { resolveSquintDbPath } from '../../utils/squintDb.js'; import type { IProgressMonitor } from '../contracts/index.js'; import { getAgentCapabilities } from '../definitions/index.js'; import { type AccumulatedLlmCall, createObserverHooks } from '../utils/hooks.js'; @@ -58,10 +57,6 @@ export interface CreateBuilderOptions { const MAX_GADGETS_PER_RESPONSE = 25; -export function isSquintEnabled(repoDir: string): boolean { - return resolveSquintDbPath(repoDir) 
!== null; -} - export async function createConfiguredBuilder(options: CreateBuilderOptions): Promise { const { client, diff --git a/src/agents/shared/executionPipeline.ts b/src/agents/shared/executionPipeline.ts index 171ed04c..cd1a5322 100644 --- a/src/agents/shared/executionPipeline.ts +++ b/src/agents/shared/executionPipeline.ts @@ -4,7 +4,6 @@ import { loadCascadeEnv, unloadCascadeEnv } from '../../utils/cascadeEnv.js'; import { createFileLogger } from '../../utils/fileLogger.js'; import { setWatchdogCleanup } from '../../utils/lifecycle.js'; import { logger } from '../../utils/logging.js'; -import { setupRemoteSquintDb } from '../../utils/squintDb.js'; import { createAgentLogger } from '../utils/logging.js'; import { cleanupAgentResources } from './cleanup.js'; import type { RunTrackingInput } from './runTracking.js'; @@ -90,11 +89,6 @@ export interface AgentPipelineOptions { */ runTracking?: RunTrackingInput & { model?: string; maxIterations?: number }; - /** - * Remote Squint DB URL for projects that don't commit .squint.db. - */ - squintDbUrl?: string; - /** * Whether the repoDir was pre-existing (skip deletion on cleanup). * When true, skips temp dir deletion in cleanup. @@ -145,7 +139,7 @@ export interface FinalizeRunOutcome { * Shared agent execution scaffold used by both the llmist lifecycle and * the Claude Code backend adapter. * - * Handles: FileLogger → Watchdog → Repo setup → Env snapshot → Squint DB → + * Handles: FileLogger → Watchdog → Repo setup → Env snapshot → * Run tracking → CWD change → Execute → Restore CWD → Finalize run → Cleanup. * * The only divergent step is the `execute` callback. 
@@ -178,11 +172,6 @@ export async function executeAgentPipeline(options: AgentPipelineOptions): Promi try { repoDir = await options.setupRepoDir(log); const envSnapshot = loadCascadeEnv(repoDir, log); - const squintCleanup = await setupRemoteSquintDb( - repoDir, - { squintDbUrl: options.squintDbUrl }, - log, - ); if (options.runTracking) { runId = await tryCreateRun( @@ -208,7 +197,6 @@ export async function executeAgentPipeline(options: AgentPipelineOptions): Promi }); } finally { process.chdir(originalCwd); - squintCleanup?.(); unloadCascadeEnv(envSnapshot); } diff --git a/src/agents/shared/promptContext.ts b/src/agents/shared/promptContext.ts index f07cafbb..0d7a7a64 100644 --- a/src/agents/shared/promptContext.ts +++ b/src/agents/shared/promptContext.ts @@ -1,7 +1,6 @@ import { getJiraConfig, getTrelloConfig } from '../../pm/config.js'; import { getPMProviderOrNull } from '../../pm/index.js'; import type { ProjectConfig } from '../../types/index.js'; -import { resolveSquintDbPath } from '../../utils/squintDb.js'; import type { PromptContext } from '../prompts/index.js'; function getListIds(project: ProjectConfig) { @@ -52,12 +51,10 @@ export function buildPromptContext( originalWorkItemUrl: string; detectedAgentType: string; }, - repoDir?: string, ): PromptContext { const pmProvider = getPMProviderOrNull(); const listIds = getListIds(project); const terminology = getPromptTerminology(pmProvider?.type); - const squintEnabled = repoDir ? resolveSquintDbPath(repoDir) !== null : false; return { workItemId, @@ -68,7 +65,6 @@ export function buildPromptContext( pmType: pmProvider?.type, ...terminology, maxInFlightItems: project.maxInFlightItems ?? 
1, - squintEnabled, ...(prContext && { prNumber: prContext.prNumber, prBranch: prContext.prBranch, diff --git a/src/backends/adapter.ts b/src/backends/adapter.ts index 20c8a8a4..6745ab65 100644 --- a/src/backends/adapter.ts +++ b/src/backends/adapter.ts @@ -48,8 +48,6 @@ export async function executeWithEngine( skipRepoDeletion: Boolean(input.logDir), - squintDbUrl: input.project.squintDbUrl, - finalizeRun: (runId, fileLogger, outcome) => finalizeEngineRun(runId, fileLogger, { status: outcome.status, diff --git a/src/backends/claude-code/env.ts b/src/backends/claude-code/env.ts index 669da801..647a6881 100644 --- a/src/backends/claude-code/env.ts +++ b/src/backends/claude-code/env.ts @@ -21,9 +21,6 @@ export const ALLOWED_ENV_EXACT = new Set([ // Claude auth 'CLAUDE_CODE_OAUTH_TOKEN', 'ANTHROPIC_API_KEY', - - // Squint - 'SQUINT_DB_PATH', ]); /** Prefix patterns — any var starting with one of these passes through. */ diff --git a/src/backends/codex/env.ts b/src/backends/codex/env.ts index c3c8d14e..5740ad66 100644 --- a/src/backends/codex/env.ts +++ b/src/backends/codex/env.ts @@ -17,9 +17,6 @@ const ALLOWED_ENV_EXACT = new Set([ // Codex auth 'OPENAI_API_KEY', - - // Squint - 'SQUINT_DB_PATH', ]); export function buildEnv( diff --git a/src/backends/codex/index.ts b/src/backends/codex/index.ts index 138de946..ed6d194a 100644 --- a/src/backends/codex/index.ts +++ b/src/backends/codex/index.ts @@ -358,8 +358,6 @@ export class CodexEngine extends NativeToolEngine { ...SHARED_ALLOWED_ENV_EXACT, // Codex auth 'OPENAI_API_KEY', - // Squint - 'SQUINT_DB_PATH', ]); } diff --git a/src/backends/opencode/env.ts b/src/backends/opencode/env.ts index e2163a99..dea08894 100644 --- a/src/backends/opencode/env.ts +++ b/src/backends/opencode/env.ts @@ -19,9 +19,6 @@ const ALLOWED_ENV_EXACT = new Set([ 'OPENAI_API_KEY', 'ANTHROPIC_API_KEY', 'OPENROUTER_API_KEY', - - // Squint - 'SQUINT_DB_PATH', ]); export function buildEnv( diff --git a/src/backends/opencode/index.ts 
b/src/backends/opencode/index.ts index 16c911eb..4bfefc4f 100644 --- a/src/backends/opencode/index.ts +++ b/src/backends/opencode/index.ts @@ -429,8 +429,6 @@ export class OpenCodeEngine extends NativeToolEngine { 'OPENAI_API_KEY', 'ANTHROPIC_API_KEY', 'OPENROUTER_API_KEY', - // Squint - 'SQUINT_DB_PATH', ]); } diff --git a/src/backends/secretOrchestrator.ts b/src/backends/secretOrchestrator.ts index c0b004d9..405a010d 100644 --- a/src/backends/secretOrchestrator.ts +++ b/src/backends/secretOrchestrator.ts @@ -64,8 +64,6 @@ export async function buildExecutionPlan( project, input.triggerType, prContext, - undefined, - repoDir, ); // Load DB partials for template include resolution diff --git a/src/config/schema.ts b/src/config/schema.ts index b4da6f47..60cbe368 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -83,7 +83,6 @@ export const ProjectConfigSchema = z.object({ * Used by buildExecutionPlan() to merge into the execution plan's engineSettings. */ agentEngineSettings: z.record(z.string(), EngineSettingsSchema).optional(), - squintDbUrl: z.string().url().optional(), runLinksEnabled: z.boolean().default(false), maxInFlightItems: z.number().int().positive().optional(), snapshotEnabled: z.boolean().optional(), diff --git a/src/db/migrations/0048_remove_squint_db_url.sql b/src/db/migrations/0048_remove_squint_db_url.sql new file mode 100644 index 00000000..eac1d361 --- /dev/null +++ b/src/db/migrations/0048_remove_squint_db_url.sql @@ -0,0 +1 @@ +ALTER TABLE "projects" DROP COLUMN IF EXISTS "squint_db_url"; diff --git a/src/db/migrations/meta/_journal.json b/src/db/migrations/meta/_journal.json index 70fa7d4f..b09ac4ec 100644 --- a/src/db/migrations/meta/_journal.json +++ b/src/db/migrations/meta/_journal.json @@ -337,6 +337,13 @@ "when": 1782000000000, "tag": "0047_add_alerting_integration", "breakpoints": false + }, + { + "idx": 48, + "version": "7", + "when": 1783000000000, + "tag": "0048_remove_squint_db_url", + "breakpoints": false } ] } diff 
--git a/src/db/repositories/configMapper.ts b/src/db/repositories/configMapper.ts index e481bf86..64889b01 100644 --- a/src/db/repositories/configMapper.ts +++ b/src/db/repositories/configMapper.ts @@ -82,7 +82,6 @@ export interface ProjectConfigRaw { progressModel?: string; progressIntervalMinutes?: number; workItemBudgetUsd?: number; - squintDbUrl?: string; engineSettings?: EngineSettings; /** Per-agent engine settings overrides keyed by agent type. */ agentEngineSettings?: Record; @@ -127,7 +126,6 @@ type ProjectRow = { workItemBudgetUsd: string | null; progressModel: string | null; progressIntervalMinutes: string | null; - squintDbUrl: string | null; agentEngine: string | null; agentEngineSettings: EngineSettings | null; runLinksEnabled: boolean; @@ -210,7 +208,6 @@ function buildBaseProjectFields(row: ProjectRow, pmType: 'trello' | 'jira'): Pro progressIntervalMinutes: numericOrUndefined(row.progressIntervalMinutes), workItemBudgetUsd: numericOrUndefined(row.workItemBudgetUsd), engineSettings: row.agentEngineSettings ?? undefined, - squintDbUrl: row.squintDbUrl ?? undefined, runLinksEnabled: row.runLinksEnabled ?? false, maxInFlightItems: row.maxInFlightItems ?? undefined, snapshotEnabled: row.snapshotEnabled ?? 
undefined, diff --git a/src/db/schema/projects.ts b/src/db/schema/projects.ts index 532d128f..58133bb0 100644 --- a/src/db/schema/projects.ts +++ b/src/db/schema/projects.ts @@ -23,7 +23,6 @@ export const projects = pgTable( agentEngineSettings: jsonb('agent_engine_settings').$type(), progressModel: text('progress_model'), progressIntervalMinutes: numeric('progress_interval_minutes', { precision: 5, scale: 1 }), - squintDbUrl: text('squint_db_url'), runLinksEnabled: boolean('run_links_enabled').default(false).notNull(), maxInFlightItems: integer('max_in_flight_items'), diff --git a/src/gadgets/tmux/TmuxGadget.ts b/src/gadgets/tmux/TmuxGadget.ts index a7ba131d..1ca23087 100644 --- a/src/gadgets/tmux/TmuxGadget.ts +++ b/src/gadgets/tmux/TmuxGadget.ts @@ -125,31 +125,6 @@ Commands are interpreted by bash, so pipes, &&, ||, redirects, and globs all wor 'session=test-run status=exited exit_code=0\n\n> project@1.0.0 test\n> vitest run\n\n✓ 15 tests passed', comment: 'Run tests - command completed within 120s wait period', }, - { - params: { - action: 'start', - comment: 'Exploring vehicle service module dependencies', - session: 'squint-modules', - command: 'squint modules show backend.services.vehicles --json', - wait: 15000, - }, - output: - 'session=squint-modules status=exited exit_code=0\n\n{"path":"backend.services.vehicles","description":"Vehicle business logic","files":["src/services/vehicles.service.ts"],"dependencies":["backend.data.models","shared-types.entities.vehicles"],"dependents":["backend.api.vehicles"]}', - comment: - "Use squint to see a module's files, dependencies, and dependents before reading code", - }, - { - params: { - action: 'start', - comment: 'Tracing vehicle search data flow', - session: 'squint-features', - command: 'squint features show vehicle-inventory --json', - wait: 15000, - }, - output: - 'session=squint-features status=exited exit_code=0\n\n{"slug":"vehicle-inventory","description":"Vehicle CRUD and 
search","flows":["VehicleController.getAll -> VehicleService.getAll -> VehicleModel.findAll"],"modules":["backend.api.vehicles","backend.services.vehicles","backend.data.models"]}', - comment: 'Use squint to trace how data flows through the system before exploring code', - }, { params: { action: 'start', diff --git a/src/utils/squintDb.ts b/src/utils/squintDb.ts deleted file mode 100644 index 22cdbdc0..00000000 --- a/src/utils/squintDb.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { randomUUID } from 'node:crypto'; -import { createWriteStream, existsSync, unlinkSync } from 'node:fs'; -import { stat } from 'node:fs/promises'; -import { tmpdir } from 'node:os'; -import { join } from 'node:path'; -import { Readable } from 'node:stream'; -import { pipeline } from 'node:stream/promises'; - -/** - * Resolve the effective Squint DB path: SQUINT_DB_PATH env var, or .squint.db in repoDir. - * Returns null if neither exists. - */ -export function resolveSquintDbPath(repoDir: string): string | null { - const envPath = process.env.SQUINT_DB_PATH; - if (envPath && existsSync(envPath)) return envPath; - - const localPath = join(repoDir, '.squint.db'); - if (existsSync(localPath)) return localPath; - - return null; -} - -/** - * If .squint.db is missing from repoDir but project has a squintDbUrl, - * download it to a temp file and set SQUINT_DB_PATH in process.env. - * Returns a cleanup function (or null if nothing was downloaded). 
- */ -export async function setupRemoteSquintDb( - repoDir: string, - project: { squintDbUrl?: string }, - log: { - info: (msg: string, ctx?: Record) => void; - warn: (msg: string, ctx?: Record) => void; - }, -): Promise<(() => void) | null> { - // Local DB takes precedence - if (existsSync(join(repoDir, '.squint.db'))) return null; - - if (!project.squintDbUrl) return null; - - const tempPath = join(tmpdir(), `cascade-squint-${randomUUID()}.db`); - const startTime = Date.now(); - - try { - const response = await fetch(project.squintDbUrl); - if (!response.ok) { - log.warn('Failed to download remote Squint DB', { - url: project.squintDbUrl, - status: response.status, - }); - return null; - } - - if (!response.body) { - log.warn('Remote Squint DB response has no body', { url: project.squintDbUrl }); - return null; - } - - const writeStream = createWriteStream(tempPath); - await pipeline( - Readable.fromWeb(response.body as import('node:stream/web').ReadableStream), - writeStream, - ); - - const fileInfo = await stat(tempPath); - const durationMs = Date.now() - startTime; - - log.info('Downloaded remote Squint DB', { - url: project.squintDbUrl, - path: tempPath, - sizeBytes: fileInfo.size, - durationMs, - }); - - process.env.SQUINT_DB_PATH = tempPath; - - return () => { - try { - if (existsSync(tempPath)) unlinkSync(tempPath); - } catch { - // Ignore cleanup errors - } - process.env.SQUINT_DB_PATH = undefined; - }; - } catch (err) { - log.warn('Failed to download remote Squint DB', { - url: project.squintDbUrl, - error: String(err), - }); - // Clean up partial download - try { - if (existsSync(tempPath)) unlinkSync(tempPath); - } catch { - // Ignore - } - return null; - } -} diff --git a/tests/unit/agents/definitions/loader.test.ts b/tests/unit/agents/definitions/loader.test.ts index c194c906..83b739f5 100644 --- a/tests/unit/agents/definitions/loader.test.ts +++ b/tests/unit/agents/definitions/loader.test.ts @@ -157,7 +157,6 @@ describe('YAML agent definitions 
loader', () => { expect(statusChangedTrigger?.contextPipeline).toEqual([ 'directoryListing', 'contextFiles', - 'squint', 'workItem', 'prepopulateTodos', ]); @@ -166,7 +165,7 @@ describe('YAML agent definitions loader', () => { it('review agent triggers use PR context pipeline', () => { const def = loadBuiltinDefinition('review'); const ciPassedTrigger = def.triggers.find((t) => t.event === 'scm:check-suite-success'); - expect(ciPassedTrigger?.contextPipeline).toEqual(['prContext', 'contextFiles', 'squint']); + expect(ciPassedTrigger?.contextPipeline).toEqual(['prContext', 'contextFiles']); }); it('planning agent does not have pm:comment-mention trigger (routed to respond-to-planning-comment)', () => { @@ -190,7 +189,6 @@ describe('YAML agent definitions loader', () => { 'prContext', 'directoryListing', 'contextFiles', - 'squint', 'workItem', ]); }); @@ -203,7 +201,6 @@ describe('YAML agent definitions loader', () => { 'prConversation', 'directoryListing', 'contextFiles', - 'squint', ]); }); diff --git a/tests/unit/agents/definitions/strategies.test.ts b/tests/unit/agents/definitions/strategies.test.ts index ccef058a..ea77dea3 100644 --- a/tests/unit/agents/definitions/strategies.test.ts +++ b/tests/unit/agents/definitions/strategies.test.ts @@ -7,7 +7,6 @@ describe.concurrent('CONTEXT_STEP_REGISTRY', () => { const expectedKeys = [ 'directoryListing', 'contextFiles', - 'squint', 'workItem', 'prepopulateTodos', 'prContext', @@ -31,7 +30,6 @@ describe.concurrent('CONTEXT_STEP_REGISTRY', () => { const expectedKeys = [ 'directoryListing', 'contextFiles', - 'squint', 'workItem', 'prepopulateTodos', 'prContext', @@ -52,10 +50,6 @@ describe.concurrent('CONTEXT_STEP_REGISTRY', () => { expect(typeof CONTEXT_STEP_REGISTRY.contextFiles).toBe('function'); }); - it('squint entry is a function', () => { - expect(typeof CONTEXT_STEP_REGISTRY.squint).toBe('function'); - }); - it('workItem entry is a function', () => { expect(typeof CONTEXT_STEP_REGISTRY.workItem).toBe('function'); 
}); diff --git a/tests/unit/agents/prompts.test.ts b/tests/unit/agents/prompts.test.ts index 10a2741a..34f58fc6 100644 --- a/tests/unit/agents/prompts.test.ts +++ b/tests/unit/agents/prompts.test.ts @@ -496,18 +496,6 @@ describe('getTemplateVariables', () => { expect(names).toContain('workItemId'); expect(names).toContain('projectId'); }); - - it('includes squintEnabled variable', () => { - const vars = getTemplateVariables(); - const names = vars.map((v) => v.name); - expect(names).toContain('squintEnabled'); - }); - - it('squintEnabled variable belongs to Squint group', () => { - const vars = getTemplateVariables(); - const squintVar = vars.find((v) => v.name === 'squintEnabled'); - expect(squintVar?.group).toBe('Squint'); - }); }); describe('PM terminology rendering', () => { @@ -701,136 +689,6 @@ describe('debug agent gadget naming', () => { }); }); -describe('squintEnabled template gating', () => { - it('implementation prompt with squintEnabled=true includes squint instructions', () => { - const prompt = getSystemPrompt('implementation', { squintEnabled: true }); - expect(prompt).toContain('squint features show'); - expect(prompt).toContain('squint flows show'); - expect(prompt).toContain('squint modules show'); - expect(prompt).toContain('squint-modules'); - expect(prompt).toContain('squint-features'); - }); - - it('implementation prompt with squintEnabled=false excludes squint instructions', () => { - const prompt = getSystemPrompt('implementation', { squintEnabled: false }); - expect(prompt).not.toContain('squint features show'); - expect(prompt).not.toContain('squint flows show'); - expect(prompt).not.toContain('squint modules show'); - expect(prompt).not.toContain('squint-modules'); - expect(prompt).not.toContain('squint-features'); - }); - - it('implementation prompt with squintEnabled=false still contains core instructions', () => { - const prompt = getSystemPrompt('implementation', { squintEnabled: false }); - expect(prompt).toContain('CLAUDE.md'); - 
expect(prompt).toContain('Tmux'); - expect(prompt).toContain('conventional commits'); - }); - - it('planning prompt with squintEnabled=true includes squint instructions', () => { - const prompt = getSystemPrompt('planning', { squintEnabled: true }); - expect(prompt).toContain('squint features show'); - expect(prompt).toContain('squint flows show'); - expect(prompt).toContain('squint modules show'); - }); - - it('planning prompt with squintEnabled=false excludes squint instructions', () => { - const prompt = getSystemPrompt('planning', { squintEnabled: false }); - expect(prompt).not.toContain('squint features show'); - expect(prompt).not.toContain('squint flows show'); - expect(prompt).not.toContain('squint modules show'); - }); - - it('planning prompt with squintEnabled=false still contains core instructions', () => { - const prompt = getSystemPrompt('planning', { squintEnabled: false }); - expect(prompt).toContain('ReadWorkItem'); - expect(prompt).toContain('implementation plan'); - }); - - it('splitting prompt with squintEnabled=true includes squint instructions', () => { - const prompt = getSystemPrompt('splitting', { squintEnabled: true }); - expect(prompt).toContain('squint features show'); - expect(prompt).toContain('squint modules show'); - }); - - it('splitting prompt with squintEnabled=false excludes squint instructions', () => { - const prompt = getSystemPrompt('splitting', { squintEnabled: false }); - expect(prompt).not.toContain('squint features show'); - expect(prompt).not.toContain('squint modules show'); - }); - - it('review prompt with squintEnabled=true includes squint instructions', () => { - const prompt = getSystemPrompt('review', { squintEnabled: true }); - expect(prompt).toContain('squint modules show'); - expect(prompt).toContain('Squint for Conflict Detection'); - expect(prompt).toContain('squint features show'); - }); - - it('review prompt with squintEnabled=false excludes squint-specific instructions', () => { - const prompt = 
getSystemPrompt('review', { squintEnabled: false }); - expect(prompt).not.toContain('Squint for Conflict Detection'); - expect(prompt).not.toContain('squint modules show'); - expect(prompt).not.toContain('squint features show'); - expect(prompt).not.toContain('Use squint to see the forest'); - expect(prompt).not.toContain('with squint evidence'); - }); - - it('review prompt with squintEnabled=true includes philosophy squint reference', () => { - const prompt = getSystemPrompt('review', { squintEnabled: true }); - expect(prompt).toContain('Use squint to see the forest, not just the trees.'); - }); - - it('review prompt with squintEnabled=false still contains core review instructions', () => { - const prompt = getSystemPrompt('review', { squintEnabled: false }); - expect(prompt).toContain('BLOCKING'); - expect(prompt).toContain('APPROVE'); - expect(prompt).toContain('REQUEST_CHANGES'); - }); - - it('respond-to-planning-comment prompt with squintEnabled=true includes squint instructions', () => { - const prompt = getSystemPrompt('respond-to-planning-comment', { squintEnabled: true }); - expect(prompt).toContain('squint features show'); - expect(prompt).toContain('squint flows show'); - expect(prompt).toContain('squint modules show'); - }); - - it('respond-to-planning-comment prompt with squintEnabled=false excludes squint instructions', () => { - const prompt = getSystemPrompt('respond-to-planning-comment', { squintEnabled: false }); - expect(prompt).not.toContain('squint features show'); - expect(prompt).not.toContain('squint flows show'); - expect(prompt).not.toContain('squint modules show'); - }); - - it('squint-exploration partial with squintEnabled=true includes squint protocol', () => { - const partial = getRawPartial('squint-exploration'); - // The partial itself contains the conditional; render it with the context - const rendered = renderCustomPrompt(partial, { squintEnabled: true }); - expect(rendered).toContain('squint features show'); - 
expect(rendered).toContain('squint symbols show'); - }); - - it('squint-exploration partial with squintEnabled=false shows fallback message', () => { - const partial = getRawPartial('squint-exploration'); - const rendered = renderCustomPrompt(partial, { squintEnabled: false }); - expect(rendered).not.toContain('squint features show'); - expect(rendered).toContain('no Squint database'); - }); - - it('tmux partial with squintEnabled=true includes squint session name examples', () => { - const partial = getRawPartial('tmux'); - const rendered = renderCustomPrompt(partial, { squintEnabled: true }); - expect(rendered).toContain('squint-modules'); - expect(rendered).toContain('squint-features'); - }); - - it('tmux partial with squintEnabled=false excludes squint session name examples', () => { - const partial = getRawPartial('tmux'); - const rendered = renderCustomPrompt(partial, { squintEnabled: false }); - expect(rendered).not.toContain('squint-modules'); - expect(rendered).not.toContain('squint-features'); - }); -}); - describe('documentation-maintenance partial', () => { it('partial exists in getAvailablePartialNames()', () => { const names = getAvailablePartialNames(); diff --git a/tests/unit/agents/shared/builderFactory.test.ts b/tests/unit/agents/shared/builderFactory.test.ts index 07a122cb..eac6a65c 100644 --- a/tests/unit/agents/shared/builderFactory.test.ts +++ b/tests/unit/agents/shared/builderFactory.test.ts @@ -1,9 +1,5 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; -vi.mock('../../../../src/utils/squintDb.js', () => ({ - resolveSquintDbPath: vi.fn().mockReturnValue(null), -})); - vi.mock('../../../../src/config/compactionConfig.js', () => ({ getCompactionConfig: vi.fn().mockReturnValue({ maxTokens: 100000, strategy: 'hybrid' }), })); @@ -79,15 +75,10 @@ vi.mock('llmist', () => ({ import { execSync } from 'node:child_process'; import { AgentBuilder, BudgetPricingUnavailableError } from 'llmist'; import { getAgentCapabilities } from 
'../../../../src/agents/definitions/index.js'; -import { - createConfiguredBuilder, - isSquintEnabled, -} from '../../../../src/agents/shared/builderFactory.js'; +import { createConfiguredBuilder } from '../../../../src/agents/shared/builderFactory.js'; import { initSessionState, setReadOnlyFs } from '../../../../src/gadgets/sessionState.js'; -import { resolveSquintDbPath } from '../../../../src/utils/squintDb.js'; const mockExecSync = vi.mocked(execSync); -const mockResolveSquintDbPath = vi.mocked(resolveSquintDbPath); const mockInitSessionState = vi.mocked(initSessionState); const mockSetReadOnlyFs = vi.mocked(setReadOnlyFs); const mockGetAgentCapabilities = vi.mocked(getAgentCapabilities); @@ -123,8 +114,6 @@ function createBaseOptions(overrides?: object) { } beforeEach(() => { - mockResolveSquintDbPath.mockReturnValue(null); - // Reset all mock builder methods to return the builder instance for (const key of Object.keys(mockBuilderInstance)) { (mockBuilderInstance as Record>)[key].mockReturnValue( @@ -133,22 +122,6 @@ beforeEach(() => { } }); -// ============================================================================ -// isSquintEnabled -// ============================================================================ - -describe('isSquintEnabled', () => { - it('returns false when resolveSquintDbPath returns null', () => { - mockResolveSquintDbPath.mockReturnValue(null); - expect(isSquintEnabled('/repo')).toBe(false); - }); - - it('returns true when resolveSquintDbPath returns a path', () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - expect(isSquintEnabled('/repo')).toBe(true); - }); -}); - // ============================================================================ // createConfiguredBuilder // ============================================================================ diff --git a/tests/unit/agents/shared/executionPipeline.test.ts b/tests/unit/agents/shared/executionPipeline.test.ts index 12a30113..8af60510 100644 --- 
a/tests/unit/agents/shared/executionPipeline.test.ts +++ b/tests/unit/agents/shared/executionPipeline.test.ts @@ -25,10 +25,6 @@ vi.mock('../../../../src/utils/lifecycle.js', () => ({ clearWatchdogCleanup: vi.fn(), })); -vi.mock('../../../../src/utils/squintDb.js', () => ({ - setupRemoteSquintDb: vi.fn().mockResolvedValue(null), -})); - vi.mock('../../../../src/utils/logging.js', () => ({ logger: { info: vi.fn(), @@ -63,7 +59,6 @@ import { import { clearWatchdogCleanup, setWatchdogCleanup } from '../../../../src/utils/lifecycle.js'; import { logger } from '../../../../src/utils/logging.js'; import { cleanupTempDir } from '../../../../src/utils/repo.js'; -import { setupRemoteSquintDb } from '../../../../src/utils/squintDb.js'; const mockCreateFileLogger = vi.mocked(createFileLogger); const mockCreateAgentLogger = vi.mocked(createAgentLogger); @@ -74,7 +69,6 @@ const mockCleanupLogFile = vi.mocked(cleanupLogFile); const mockCleanupLogDirectory = vi.mocked(cleanupLogDirectory); const mockClearWatchdogCleanup = vi.mocked(clearWatchdogCleanup); const mockSetWatchdogCleanup = vi.mocked(setWatchdogCleanup); -const mockSetupRemoteSquintDb = vi.mocked(setupRemoteSquintDb); function setupMocks() { const mockLoggerInstance = { @@ -88,7 +82,6 @@ function setupMocks() { mockCreateFileLogger.mockReturnValue(mockLoggerInstance as never); mockCreateAgentLogger.mockReturnValue({ info: vi.fn(), warn: vi.fn(), error: vi.fn() } as never); mockLoadCascadeEnv.mockReturnValue({}); - mockSetupRemoteSquintDb.mockResolvedValue(null); return mockLoggerInstance; } diff --git a/tests/unit/agents/shared/promptContext.test.ts b/tests/unit/agents/shared/promptContext.test.ts index 11957c19..d445094a 100644 --- a/tests/unit/agents/shared/promptContext.test.ts +++ b/tests/unit/agents/shared/promptContext.test.ts @@ -5,18 +5,10 @@ vi.mock('../../../../src/pm/index.js', () => ({ getPMProviderOrNull: vi.fn(), })); -// Mock resolveSquintDbPath to control squint availability 
-vi.mock('../../../../src/utils/squintDb.js', () => ({ - resolveSquintDbPath: vi.fn(), -})); - import { buildPromptContext } from '../../../../src/agents/shared/promptContext.js'; import { getPMProviderOrNull } from '../../../../src/pm/index.js'; -import { resolveSquintDbPath } from '../../../../src/utils/squintDb.js'; import { createMockPMProvider } from '../../../helpers/mockPMProvider.js'; -const mockResolveSquintDbPath = vi.mocked(resolveSquintDbPath); - const mockGetPMProvider = vi.mocked(getPMProviderOrNull); function makeProject(overrides: Record = {}) { @@ -438,64 +430,4 @@ describe('buildPromptContext', () => { expect(ctx.detectedAgentType).toBe('implementation'); }); }); - - describe('squintEnabled', () => { - beforeEach(() => { - const mockProvider = createMockPMProvider(); - mockProvider.type = 'trello'; - mockProvider.getWorkItemUrl = vi.fn((id: string) => `https://trello.com/c/${id}`); - mockGetPMProvider.mockReturnValue(mockProvider); - }); - - it('returns squintEnabled: true when resolveSquintDbPath returns a path', () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - const ctx = buildPromptContext( - 'card1', - makeProject() as never, - undefined, - undefined, - undefined, - '/repo', - ); - expect(ctx.squintEnabled).toBe(true); - }); - - it('returns squintEnabled: false when resolveSquintDbPath returns null', () => { - mockResolveSquintDbPath.mockReturnValue(null); - const ctx = buildPromptContext( - 'card1', - makeProject() as never, - undefined, - undefined, - undefined, - '/repo', - ); - expect(ctx.squintEnabled).toBe(false); - }); - - it('returns squintEnabled: false when repoDir is not provided', () => { - mockResolveSquintDbPath.mockReturnValue('/some/path.db'); - const ctx = buildPromptContext('card1', makeProject() as never); - expect(ctx.squintEnabled).toBe(false); - }); - - it('does not call resolveSquintDbPath when repoDir is undefined', () => { - mockResolveSquintDbPath.mockReturnValue('/some/path.db'); - 
buildPromptContext('card1', makeProject() as never); - expect(mockResolveSquintDbPath).not.toHaveBeenCalled(); - }); - - it('calls resolveSquintDbPath with the provided repoDir', () => { - mockResolveSquintDbPath.mockReturnValue(null); - buildPromptContext( - 'card1', - makeProject() as never, - undefined, - undefined, - undefined, - '/workspace/my-repo', - ); - expect(mockResolveSquintDbPath).toHaveBeenCalledWith('/workspace/my-repo'); - }); - }); }); diff --git a/tests/unit/backends/agent-profiles.test.ts b/tests/unit/backends/agent-profiles.test.ts index 89e3dc30..ddede2d7 100644 --- a/tests/unit/backends/agent-profiles.test.ts +++ b/tests/unit/backends/agent-profiles.test.ts @@ -115,10 +115,6 @@ vi.mock('../../../src/github/client.js', () => mockGitHubClientModule); vi.mock('../../../src/agents/utils/setup.js', () => ({})); -vi.mock('../../../src/utils/squintDb.js', () => ({ - resolveSquintDbPath: vi.fn(() => null), -})); - // Mock agentMessages to avoid requiring initAgentMessages() in tests vi.mock('../../../src/config/agentMessages.js', () => ({ INITIAL_MESSAGES: new Proxy( @@ -151,11 +147,6 @@ vi.mock('../../../src/config/agentMessages.js', () => ({ getAgentLabel: vi.fn(() => ({ emoji: '⚙️', label: 'Progress Update' })), })); -vi.mock('node:child_process', () => ({ - execFileSync: vi.fn(() => 'squint overview output'), -})); - -import { execFileSync } from 'node:child_process'; import { type AgentProfile, getAgentProfile, @@ -171,10 +162,7 @@ import { } from '../../../src/agents/shared/prFormatting.js'; import { readWorkItem, readWorkItemWithMedia } from '../../../src/gadgets/pm/core/readWorkItem.js'; import { githubClient } from '../../../src/github/client.js'; -import { resolveSquintDbPath } from '../../../src/utils/squintDb.js'; -const mockExecFileSync = vi.mocked(execFileSync); -const mockResolveSquintDbPath = vi.mocked(resolveSquintDbPath); const _mockReadWorkItem = vi.mocked(readWorkItem); const mockReadWorkItemWithMedia = 
vi.mocked(readWorkItemWithMedia); @@ -574,7 +562,6 @@ function makeContextParams(overrides: { describe('fetchDirectoryListing', () => { it('splitting fetchContext returns a ListDirectory injection with maxDepth:3', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ workItemId: undefined, triggerEvent: 'pm:status-changed' }); @@ -595,7 +582,6 @@ describe('fetchDirectoryListing', () => { describe('fetchContextFileInjections', () => { it('returns ReadFile injections for each context file', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ triggerEvent: 'pm:status-changed', @@ -618,7 +604,6 @@ describe('fetchContextFileInjections', () => { }); it('returns no ReadFile injections when contextFiles is empty', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ triggerEvent: 'pm:status-changed', contextFiles: [] }); @@ -631,70 +616,8 @@ describe('fetchContextFileInjections', () => { }); }); -describe('fetchSquintOverview', () => { - it('returns SquintOverview injection when squint db is present', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockReturnValue('squint overview output\n'); - const profile = await getAgentProfile('splitting'); - const params = makeContextParams({ triggerEvent: 'pm:status-changed' }); - - const injections = await profile.fetchContext( - params as Parameters[0], - ); - - const squintInjection = injections.find((i) => i.toolName === 'SquintOverview'); - expect(squintInjection).toBeDefined(); - expect(squintInjection?.result).toBe('squint overview output\n'); - expect(squintInjection?.params).toMatchObject({ database: '/repo/.squint.db' }); - }); - - it('returns no SquintOverview injection when squint db is 
absent', async () => { - mockResolveSquintDbPath.mockReturnValue(null); - const profile = await getAgentProfile('splitting'); - const params = makeContextParams({ triggerEvent: 'pm:status-changed' }); - - const injections = await profile.fetchContext( - params as Parameters[0], - ); - - const squintInjection = injections.find((i) => i.toolName === 'SquintOverview'); - expect(squintInjection).toBeUndefined(); - }); - - it('returns no SquintOverview injection when squint command throws', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockImplementation(() => { - throw new Error('squint not found'); - }); - const profile = await getAgentProfile('splitting'); - const params = makeContextParams({ triggerEvent: 'pm:status-changed' }); - - const injections = await profile.fetchContext( - params as Parameters[0], - ); - - const squintInjection = injections.find((i) => i.toolName === 'SquintOverview'); - expect(squintInjection).toBeUndefined(); - }); - - it('returns no SquintOverview injection when squint output is empty', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockReturnValue(' '); - const profile = await getAgentProfile('splitting'); - const params = makeContextParams({ triggerEvent: 'pm:status-changed' }); - - const injections = await profile.fetchContext( - params as Parameters[0], - ); - - const squintInjection = injections.find((i) => i.toolName === 'SquintOverview'); - expect(squintInjection).toBeUndefined(); - }); -}); - describe('fetchWorkItemInjection', () => { it('returns ReadWorkItem injection when readWorkItemWithMedia resolves', async () => { - mockResolveSquintDbPath.mockReturnValue(null); mockReadWorkItemWithMedia.mockResolvedValue({ text: '# card title\n\ncard body', media: [] }); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ triggerEvent: 'pm:status-changed', workItemId: 'card-123' }); @@ -714,7 +637,6 @@ 
describe('fetchWorkItemInjection', () => { }); it('skips injection when readWorkItemWithMedia throws', async () => { - mockResolveSquintDbPath.mockReturnValue(null); mockReadWorkItemWithMedia.mockRejectedValue(new Error('card not found')); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ @@ -731,7 +653,6 @@ describe('fetchWorkItemInjection', () => { }); it('never calls readWorkItem when workItemId is absent', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ triggerEvent: 'pm:status-changed', workItemId: undefined }); @@ -742,9 +663,7 @@ describe('fetchWorkItemInjection', () => { }); describe('fetchWorkItemContext orchestration', () => { - it('includes dirListing, contextFiles, squint, and workItem in order', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockReturnValue('squint output\n'); + it('includes dirListing, contextFiles, and workItem in order', async () => { mockReadWorkItemWithMedia.mockResolvedValue({ text: 'card content', media: [] }); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ @@ -760,21 +679,17 @@ describe('fetchWorkItemContext orchestration', () => { const toolNames = injections.map((i) => i.toolName); expect(toolNames).toContain('ListDirectory'); expect(toolNames).toContain('ReadFile'); - expect(toolNames).toContain('SquintOverview'); expect(toolNames).toContain('ReadWorkItem'); // Ordering: dirListing first const dirIdx = toolNames.indexOf('ListDirectory'); const readFileIdx = toolNames.indexOf('ReadFile'); - const squintIdx = toolNames.indexOf('SquintOverview'); const workItemIdx = toolNames.indexOf('ReadWorkItem'); expect(dirIdx).toBeLessThan(readFileIdx); - expect(readFileIdx).toBeLessThan(squintIdx); - expect(squintIdx).toBeLessThan(workItemIdx); + expect(readFileIdx).toBeLessThan(workItemIdx); }); - it('gracefully 
omits squint and workItem when unavailable', async () => { - mockResolveSquintDbPath.mockReturnValue(null); + it('gracefully omits workItem when unavailable', async () => { mockReadWorkItemWithMedia.mockRejectedValue(new Error('unavailable')); const profile = await getAgentProfile('splitting'); const params = makeContextParams({ triggerEvent: 'pm:status-changed', workItemId: 'card-xyz' }); @@ -783,7 +698,6 @@ describe('fetchWorkItemContext orchestration', () => { params as Parameters[0], ); - expect(injections.some((i) => i.toolName === 'SquintOverview')).toBe(false); expect(injections.some((i) => i.toolName === 'ReadWorkItem')).toBe(false); expect(injections.some((i) => i.toolName === 'ListDirectory')).toBe(true); }); @@ -798,7 +712,6 @@ describe('fetchReviewContext', () => { }); it('includes PR injections (Details, Diff, Checks)', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('review'); const params = makeContextParams({ triggerEvent: 'scm:check-suite-success', @@ -817,7 +730,6 @@ describe('fetchReviewContext', () => { }); it('includes context file injections', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('review'); const params = makeContextParams({ triggerEvent: 'scm:check-suite-success', @@ -835,25 +747,7 @@ describe('fetchReviewContext', () => { expect(readFileInjections[0].params).toMatchObject({ filePath: 'CLAUDE.md' }); }); - it('includes squint injection when squint db is present', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockReturnValue('squint content\n'); - const profile = await getAgentProfile('review'); - const params = makeContextParams({ - triggerEvent: 'scm:check-suite-success', - repoFullName: 'acme/widgets', - prNumber: 42, - }); - - const injections = await profile.fetchContext( - params as Parameters[0], - ); - - expect(injections.some((i) => i.toolName === 
'SquintOverview')).toBe(true); - }); - it('does NOT include a work item injection (review has no workItemId)', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('review'); const params = makeContextParams({ triggerEvent: 'scm:check-suite-success', @@ -870,7 +764,6 @@ describe('fetchReviewContext', () => { }); it('includes file content injections for included PR files', async () => { - mockResolveSquintDbPath.mockReturnValue(null); vi.mocked(readPRFileContents).mockResolvedValue({ included: [{ path: 'src/index.ts', content: 'file content' }], skipped: [], @@ -896,7 +789,6 @@ describe('fetchReviewContext', () => { }); it('calls formatting functions', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('review'); const params = makeContextParams({ triggerEvent: 'scm:check-suite-success', @@ -919,9 +811,7 @@ describe('fetchCIContext', () => { vi.mocked(readPRFileContents).mockResolvedValue({ included: [], skipped: [] }); }); - it('includes PR injections, dirListing, contextFiles, squint, and workItem', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockReturnValue('squint ci output\n'); + it('includes PR injections, dirListing, contextFiles, and workItem', async () => { mockReadWorkItemWithMedia.mockResolvedValue({ text: 'ci card content', media: [] }); const profile = await getAgentProfile('respond-to-ci'); const params = makeContextParams({ @@ -942,12 +832,10 @@ describe('fetchCIContext', () => { expect(toolNames).toContain('GetPRChecks'); expect(toolNames).toContain('ListDirectory'); expect(toolNames).toContain('ReadFile'); - expect(toolNames).toContain('SquintOverview'); expect(toolNames).toContain('ReadWorkItem'); }); it('skips workItem injection when workItemId is absent', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('respond-to-ci'); const params = 
makeContextParams({ triggerEvent: 'scm:check-suite-failure', @@ -977,7 +865,6 @@ describe('fetchPRCommentResponseContext', () => { }); it('includes PR injections and 3 conversation injections', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('respond-to-pr-comment'); const params = makeContextParams({ triggerEvent: 'scm:pr-comment-mention', @@ -999,9 +886,7 @@ describe('fetchPRCommentResponseContext', () => { expect(conversationInjections).toHaveLength(3); }); - it('includes dirListing, contextFiles, and squint', async () => { - mockResolveSquintDbPath.mockReturnValue('/repo/.squint.db'); - mockExecFileSync.mockReturnValue('squint pr comment output\n'); + it('includes dirListing and contextFiles', async () => { const profile = await getAgentProfile('respond-to-pr-comment'); const params = makeContextParams({ triggerEvent: 'scm:pr-comment-mention', @@ -1017,11 +902,9 @@ describe('fetchPRCommentResponseContext', () => { const toolNames = injections.map((i) => i.toolName); expect(toolNames).toContain('ListDirectory'); expect(toolNames).toContain('ReadFile'); - expect(toolNames).toContain('SquintOverview'); }); it('calls all 3 formatting functions for conversation context', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('respond-to-pr-comment'); const params = makeContextParams({ triggerEvent: 'scm:pr-comment-mention', @@ -1037,7 +920,6 @@ describe('fetchPRCommentResponseContext', () => { }); it('calls getPRReviewComments, getPRReviews, getPRIssueComments', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('respond-to-pr-comment'); const params = makeContextParams({ triggerEvent: 'scm:pr-comment-mention', @@ -1059,7 +941,6 @@ describe('fetchPRCommentResponseContext', () => { describe('resolveContextPipeline edge cases', () => { it('returns empty array when triggerEvent is undefined', async () => { - 
mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('implementation'); const params = makeContextParams({ triggerEvent: undefined }); @@ -1071,7 +952,6 @@ describe('resolveContextPipeline edge cases', () => { }); it('returns empty array when triggerEvent matches no trigger', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('implementation'); const params = makeContextParams({ triggerEvent: 'scm:unknown-event' }); @@ -1083,7 +963,6 @@ describe('resolveContextPipeline edge cases', () => { }); it('handles agent with no triggers (debug)', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('debug'); const params = makeContextParams({ triggerEvent: undefined }); @@ -1095,7 +974,6 @@ describe('resolveContextPipeline edge cases', () => { }); it('returns empty array when triggerEvent is empty string', async () => { - mockResolveSquintDbPath.mockReturnValue(null); const profile = await getAgentProfile('implementation'); const params = makeContextParams({ triggerEvent: '' }); diff --git a/tests/unit/db/repositories/configMapper.test.ts b/tests/unit/db/repositories/configMapper.test.ts index e422ebf2..5decb6fa 100644 --- a/tests/unit/db/repositories/configMapper.test.ts +++ b/tests/unit/db/repositories/configMapper.test.ts @@ -27,7 +27,6 @@ const baseProjectRow = { workItemBudgetUsd: null, progressModel: null, progressIntervalMinutes: null, - squintDbUrl: null, agentEngine: null, agentEngineSettings: null, runLinksEnabled: false, @@ -328,13 +327,6 @@ describe('mapProjectRow', () => { expect(result.workItemBudgetUsd).toBe(7.5); }); - it('includes squintDbUrl when set', () => { - const result = mapProjectRow( - makeInput({ row: { ...baseProjectRow, squintDbUrl: 'file://.squint.db' } }), - ); - expect(result.squintDbUrl).toBe('file://.squint.db'); - }); - it('does not include prompts field (prompts are now in agent definitions)', () => { const 
agentConfigs: AgentConfigRow[] = [ { diff --git a/tests/unit/repo-hygiene.test.ts b/tests/unit/repo-hygiene.test.ts index 445d2b1b..45ae9c1f 100644 --- a/tests/unit/repo-hygiene.test.ts +++ b/tests/unit/repo-hygiene.test.ts @@ -117,13 +117,6 @@ describe('open-source readiness', () => { }); }); - describe('.gitignore', () => { - it('ignores .squint.db', () => { - const content = readRoot('.gitignore'); - expect(content).toContain('.squint.db'); - }); - }); - describe('config/projects.json', () => { const config = JSON.parse(readRoot('config/projects.json')); @@ -154,10 +147,6 @@ describe('open-source readiness', () => { }); describe('committed artifacts are removed', () => { - it('.squint.db is not present', () => { - expect(existsSync(path.join(ROOT, '.squint.db'))).toBe(false); - }); - it('tmp-test.sh is not present', () => { expect(existsSync(path.join(ROOT, 'tmp-test.sh'))).toBe(false); }); diff --git a/tests/unit/utils/squintDb.test.ts b/tests/unit/utils/squintDb.test.ts deleted file mode 100644 index 88773abd..00000000 --- a/tests/unit/utils/squintDb.test.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; - -vi.mock('node:fs', () => ({ - existsSync: vi.fn(), - createWriteStream: vi.fn(), - unlinkSync: vi.fn(), -})); - -vi.mock('node:fs/promises', () => ({ - stat: vi.fn(), -})); - -vi.mock('node:stream/promises', () => ({ - pipeline: vi.fn().mockResolvedValue(undefined), -})); - -import { createWriteStream, existsSync } from 'node:fs'; -import { stat } from 'node:fs/promises'; -import { resolveSquintDbPath, setupRemoteSquintDb } from '../../../src/utils/squintDb.js'; - -const mockLog = { - info: vi.fn(), - warn: vi.fn(), -}; - -describe('squintDb', () => { - const originalEnv = process.env; - - beforeEach(() => { - process.env = { ...originalEnv }; - }); - - afterEach(() => { - process.env = originalEnv; - }); - - describe('resolveSquintDbPath', () => { - it('returns SQUINT_DB_PATH when set and file 
exists', () => { - process.env.SQUINT_DB_PATH = '/tmp/remote.db'; - vi.mocked(existsSync).mockImplementation((p) => p === '/tmp/remote.db'); - - expect(resolveSquintDbPath('/repo')).toBe('/tmp/remote.db'); - }); - - it('falls through to local .squint.db when SQUINT_DB_PATH file does not exist', () => { - process.env.SQUINT_DB_PATH = '/tmp/missing.db'; - vi.mocked(existsSync).mockImplementation((p) => String(p) === '/repo/.squint.db'); - - expect(resolveSquintDbPath('/repo')).toBe('/repo/.squint.db'); - }); - - it('returns local .squint.db path when no env var set', () => { - process.env.SQUINT_DB_PATH = undefined; - vi.mocked(existsSync).mockImplementation((p) => String(p) === '/repo/.squint.db'); - - expect(resolveSquintDbPath('/repo')).toBe('/repo/.squint.db'); - }); - - it('returns null when neither exists', () => { - process.env.SQUINT_DB_PATH = undefined; - vi.mocked(existsSync).mockReturnValue(false); - - expect(resolveSquintDbPath('/repo')).toBeNull(); - }); - - it('ignores SQUINT_DB_PATH when set but empty', () => { - process.env.SQUINT_DB_PATH = ''; - vi.mocked(existsSync).mockReturnValue(false); - - expect(resolveSquintDbPath('/repo')).toBeNull(); - }); - }); - - describe('setupRemoteSquintDb', () => { - it('returns null when local .squint.db exists', async () => { - vi.mocked(existsSync).mockImplementation((p) => String(p) === '/repo/.squint.db'); - - const result = await setupRemoteSquintDb( - '/repo', - { squintDbUrl: 'https://example.com/db' }, - mockLog, - ); - - expect(result).toBeNull(); - }); - - it('returns null when no squintDbUrl configured', async () => { - vi.mocked(existsSync).mockReturnValue(false); - - const result = await setupRemoteSquintDb('/repo', {}, mockLog); - - expect(result).toBeNull(); - }); - - it('returns null when squintDbUrl is undefined', async () => { - vi.mocked(existsSync).mockReturnValue(false); - - const result = await setupRemoteSquintDb('/repo', { squintDbUrl: undefined }, mockLog); - - expect(result).toBeNull(); - 
}); - - it('downloads DB, sets SQUINT_DB_PATH, and returns cleanup fn', async () => { - vi.mocked(existsSync).mockReturnValue(false); - - const mockWritable = { - on: vi.fn(), - write: vi.fn(), - end: vi.fn(), - once: vi.fn(), - emit: vi.fn(), - }; - vi.mocked(createWriteStream).mockReturnValue( - mockWritable as unknown as ReturnType, - ); - vi.mocked(stat).mockResolvedValue({ size: 1024 } as Awaited>); - - // Mock fetch - const mockBody = new ReadableStream({ - start(controller) { - controller.enqueue(new Uint8Array([1, 2, 3])); - controller.close(); - }, - }); - const mockResponse = { ok: true, body: mockBody }; - vi.stubGlobal('fetch', vi.fn().mockResolvedValue(mockResponse)); - - const result = await setupRemoteSquintDb( - '/repo', - { squintDbUrl: 'https://example.com/test.db' }, - mockLog, - ); - - expect(result).toBeTypeOf('function'); - expect(process.env.SQUINT_DB_PATH).toBeDefined(); - expect(process.env.SQUINT_DB_PATH).toMatch(/cascade-squint-.+\.db$/); - expect(mockLog.info).toHaveBeenCalledWith( - 'Downloaded remote Squint DB', - expect.objectContaining({ - url: 'https://example.com/test.db', - sizeBytes: 1024, - }), - ); - - // Call cleanup - result?.(); - expect(process.env.SQUINT_DB_PATH).toBeUndefined(); - }); - - it('returns null and logs warning on fetch failure', async () => { - vi.mocked(existsSync).mockReturnValue(false); - - const mockResponse = { ok: false, status: 404 }; - vi.stubGlobal('fetch', vi.fn().mockResolvedValue(mockResponse)); - - const result = await setupRemoteSquintDb( - '/repo', - { squintDbUrl: 'https://example.com/missing.db' }, - mockLog, - ); - - expect(result).toBeNull(); - expect(mockLog.warn).toHaveBeenCalledWith( - 'Failed to download remote Squint DB', - expect.objectContaining({ status: 404 }), - ); - }); - - it('returns null and logs warning on network error', async () => { - vi.mocked(existsSync).mockReturnValue(false); - - vi.stubGlobal('fetch', vi.fn().mockRejectedValue(new Error('ECONNREFUSED'))); - - const 
result = await setupRemoteSquintDb( - '/repo', - { squintDbUrl: 'https://example.com/unreachable.db' }, - mockLog, - ); - - expect(result).toBeNull(); - expect(mockLog.warn).toHaveBeenCalledWith( - 'Failed to download remote Squint DB', - expect.objectContaining({ error: 'Error: ECONNREFUSED' }), - ); - }); - }); -}); From aac9c2ad609f2923aabc0c8e33400ac4f489f537 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Apr 2026 20:54:28 +0200 Subject: [PATCH 20/52] chore(deps): bump axios from 1.13.5 to 1.15.0 (#1093) --- package-lock.json | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index 75b129a6..6dac234d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4875,14 +4875,14 @@ "license": "MIT" }, "node_modules/axios": { - "version": "1.13.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", - "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.15.0.tgz", + "integrity": "sha512-wWyJDlAatxk30ZJer+GeCWS209sA42X+N5jU2jy6oHTp7ufw8uzUTVFBX9+wTfAlhiJXGS0Bq7X6efruWjuK9Q==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", - "proxy-from-env": "^1.1.0" + "proxy-from-env": "^2.1.0" } }, "node_modules/b4a": { @@ -9462,8 +9462,13 @@ } }, "node_modules/proxy-from-env": { - "version": "1.1.0", - "license": "MIT" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-2.1.0.tgz", + "integrity": "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } }, "node_modules/pump": { "version": "3.0.3", From 7ac16556c2470270240095bcf4b58b6cefc14c9c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Apr 2026 20:54:49 +0200 Subject: [PATCH 21/52] chore(deps): bump hono from 4.12.9 to 4.12.12 (#1090) --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6dac234d..040246d4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -28,7 +28,7 @@ "dockerode": "^4.0.9", "drizzle-orm": "^0.45.1", "eta": "^4.5.0", - "hono": "^4.12.7", + "hono": "^4.12.12", "jira.js": "^5.3.0", "js-yaml": "^4.1.1", "llmist": "^16.0.4", @@ -7518,9 +7518,9 @@ } }, "node_modules/hono": { - "version": "4.12.9", - "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.9.tgz", - "integrity": "sha512-wy3T8Zm2bsEvxKZM5w21VdHDDcwVS1yUFFY6i8UobSsKfFceT7TOwhbhfKsDyx7tYQlmRM5FLpIuYvNFyjctiA==", + "version": "4.12.12", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.12.tgz", + "integrity": "sha512-p1JfQMKaceuCbpJKAPKVqyqviZdS0eUxH9v82oWo1kb9xjQ5wA6iP3FNVAPDFlz5/p7d45lO+BpSk1tuSZMF4Q==", "license": "MIT", "engines": { "node": ">=16.9.0" diff --git a/package.json b/package.json index abb447e9..d8cf0324 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,7 @@ "dockerode": "^4.0.9", "drizzle-orm": "^0.45.1", "eta": "^4.5.0", - "hono": "^4.12.7", + "hono": "^4.12.12", "jira.js": "^5.3.0", "js-yaml": "^4.1.1", "llmist": "^16.0.4", From ce2a4bcced07876d433d75ab04fa3aa0a30b87d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Apr 2026 20:55:58 +0200 Subject: [PATCH 22/52] chore(deps): bump vite from 7.3.1 to 7.3.2 (#1087) --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 040246d4..a2a8d6ea 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10736,9 +10736,9 @@ } }, "node_modules/vite": { - "version": "7.3.1", - "resolved": 
"https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", - "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.2.tgz", + "integrity": "sha512-Bby3NOsna2jsjfLVOHKes8sGwgl4TT0E6vvpYgnAYDIF/tie7MRaFthmKuHx1NSXjiTueXH3do80FMQgvEktRg==", "dev": true, "license": "MIT", "dependencies": { From cfa7aa9ca9cf1a5130876702a0dd3726e73717a2 Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 20:57:13 +0200 Subject: [PATCH 23/52] feat(linear): add Linear GraphQL API client (#1094) --- package.json | 3 +- src/linear/client.ts | 600 +++++++++++++++++++++++++++++++++++++++++++ src/linear/types.ts | 132 ++++++++++ 3 files changed, 734 insertions(+), 1 deletion(-) create mode 100644 src/linear/client.ts create mode 100644 src/linear/types.ts diff --git a/package.json b/package.json index d8cf0324..0e1aa8b0 100644 --- a/package.json +++ b/package.json @@ -134,6 +134,7 @@ "overrides": { "lodash": "^4.18.1", "lodash-es": "^4.18.1", - "brace-expansion": "^2.0.3" + "brace-expansion": "^2.0.3", + "axios": "^1.15.0" } } diff --git a/src/linear/client.ts b/src/linear/client.ts new file mode 100644 index 00000000..e4a3a429 --- /dev/null +++ b/src/linear/client.ts @@ -0,0 +1,600 @@ +/** + * Linear GraphQL API client. + * + * Same AsyncLocalStorage pattern as the Trello and JIRA clients — credentials + * are scoped per-request via withLinearCredentials(). 
+ * + * API endpoint: https://api.linear.app/graphql + * Auth: Authorization: Bearer + */ + +import { AsyncLocalStorage } from 'node:async_hooks'; +import { logger } from '../utils/logging.js'; +import type { + LinearAttachment, + LinearComment, + LinearCreateIssueInput, + LinearCredentials, + LinearIssue, + LinearLabel, + LinearReaction, + LinearUpdateIssueInput, + LinearUser, +} from './types.js'; + +const LINEAR_API_URL = 'https://api.linear.app/graphql'; + +const linearCredentialStore = new AsyncLocalStorage(); + +export function withLinearCredentials( + creds: LinearCredentials, + fn: () => Promise, +): Promise { + return linearCredentialStore.run(creds, fn); +} + +export function getLinearCredentials(): LinearCredentials { + const scoped = linearCredentialStore.getStore(); + if (!scoped) { + throw new Error( + 'No Linear credentials in scope. Wrap the call with withLinearCredentials() or ensure per-project LINEAR_API_KEY is set in the database.', + ); + } + return scoped; +} + +// ============================================================================ +// Core GraphQL fetch helper +// ============================================================================ + +interface GraphQLResponse { + data?: T; + errors?: Array<{ message: string; extensions?: Record }>; +} + +async function linearGraphQL(query: string, variables?: Record): Promise { + const { apiKey } = getLinearCredentials(); + + const response = await fetch(LINEAR_API_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ query, variables }), + }); + + if (!response.ok) { + throw new Error(`Linear API HTTP error ${response.status}`); + } + + const json = (await response.json()) as GraphQLResponse; + + if (json.errors && json.errors.length > 0) { + const messages = json.errors.map((e) => e.message).join('; '); + throw new Error(`Linear API error: ${messages}`); + } + + if (json.data === undefined) { + throw 
new Error('Linear API returned no data'); + } + + return json.data; +} + +// ============================================================================ +// Response mappers +// ============================================================================ + +function mapUser( + u: + | { + id?: string; + name?: string; + email?: string; + displayName?: string; + avatarUrl?: string | null; + active?: boolean; + } + | null + | undefined, +): LinearUser | null { + if (!u) return null; + return { + id: u.id ?? '', + name: u.name ?? '', + email: u.email ?? '', + displayName: u.displayName ?? u.name ?? '', + avatarUrl: u.avatarUrl ?? null, + active: u.active ?? true, + }; +} + +function mapLabel(l: { + id?: string; + name?: string; + color?: string; + description?: string | null; +}): LinearLabel { + return { + id: l.id ?? '', + name: l.name ?? '', + color: l.color ?? '', + description: l.description ?? null, + }; +} + +interface RawIssue { + id?: string; + identifier?: string; + title?: string; + description?: string | null; + priority?: number; + priorityLabel?: string; + state?: { id?: string; name?: string; type?: string; color?: string } | null; + team?: { id?: string; name?: string; key?: string; description?: string | null } | null; + assignee?: { + id?: string; + name?: string; + email?: string; + displayName?: string; + avatarUrl?: string | null; + active?: boolean; + } | null; + labels?: { + nodes?: Array<{ id?: string; name?: string; color?: string; description?: string | null }>; + }; + url?: string; + createdAt?: string; + updatedAt?: string; +} + +function mapState(state: RawIssue['state']) { + return { + id: state?.id ?? '', + name: state?.name ?? '', + type: state?.type ?? '', + color: state?.color ?? '', + }; +} + +function mapTeam(team: RawIssue['team']) { + return { + id: team?.id ?? '', + name: team?.name ?? '', + key: team?.key ?? '', + description: team?.description ?? 
null, + }; +} + +function mapIssue(issue: RawIssue): LinearIssue { + return { + id: issue.id ?? '', + identifier: issue.identifier ?? '', + title: issue.title ?? '', + description: issue.description ?? null, + priority: issue.priority ?? 0, + priorityLabel: issue.priorityLabel ?? 'No priority', + state: mapState(issue.state), + team: mapTeam(issue.team), + assignee: mapUser(issue.assignee), + labels: (issue.labels?.nodes ?? []).map(mapLabel), + url: issue.url ?? '', + createdAt: issue.createdAt ?? '', + updatedAt: issue.updatedAt ?? '', + }; +} + +interface RawComment { + id?: string; + body?: string; + user?: { + id?: string; + name?: string; + email?: string; + displayName?: string; + avatarUrl?: string | null; + active?: boolean; + } | null; + createdAt?: string; + updatedAt?: string; + issue?: { id?: string }; +} + +function mapComment(c: RawComment): LinearComment { + return { + id: c.id ?? '', + body: c.body ?? '', + user: mapUser(c.user), + createdAt: c.createdAt ?? '', + updatedAt: c.updatedAt ?? '', + issueId: c.issue?.id ?? 
'', + }; +} + +// ============================================================================ +// GraphQL fragments +// ============================================================================ + +const USER_FIELDS = ` + id + name + email + displayName + avatarUrl + active +`; + +const LABEL_FIELDS = ` + id + name + color + description +`; + +const STATE_FIELDS = ` + id + name + type + color +`; + +const TEAM_FIELDS = ` + id + name + key + description +`; + +const ISSUE_FIELDS = ` + id + identifier + title + description + priority + priorityLabel + url + createdAt + updatedAt + state { ${STATE_FIELDS} } + team { ${TEAM_FIELDS} } + assignee { ${USER_FIELDS} } + labels { nodes { ${LABEL_FIELDS} } } +`; + +const COMMENT_FIELDS = ` + id + body + createdAt + updatedAt + user { ${USER_FIELDS} } + issue { id } +`; + +// ============================================================================ +// Linear client +// ============================================================================ + +export const linearClient = { + // ===== Issues ===== + + async getIssue(issueId: string): Promise { + logger.debug('Fetching Linear issue', { issueId }); + const data = await linearGraphQL<{ issue: unknown }>( + `query GetIssue($id: String!) 
{ + issue(id: $id) { + ${ISSUE_FIELDS} + } + }`, + { id: issueId }, + ); + return mapIssue(data.issue as RawIssue); + }, + + async listIssues(filter?: { + teamId?: string; + assigneeId?: string; + stateId?: string; + first?: number; + }): Promise { + logger.debug('Listing Linear issues', { filter }); + + const filterObj: Record = {}; + if (filter?.teamId) filterObj.team = { id: { eq: filter.teamId } }; + if (filter?.assigneeId) filterObj.assignee = { id: { eq: filter.assigneeId } }; + if (filter?.stateId) filterObj.state = { id: { eq: filter.stateId } }; + + const data = await linearGraphQL<{ issues: { nodes: unknown[] } }>( + `query ListIssues($filter: IssueFilter, $first: Int) { + issues(filter: $filter, first: $first) { + nodes { + ${ISSUE_FIELDS} + } + } + }`, + { + filter: Object.keys(filterObj).length > 0 ? filterObj : undefined, + first: filter?.first ?? 50, + }, + ); + return (data.issues.nodes as RawIssue[]).map(mapIssue); + }, + + async createIssue(input: LinearCreateIssueInput): Promise { + logger.debug('Creating Linear issue', { title: input.title, teamId: input.teamId }); + const data = await linearGraphQL<{ issueCreate: { issue: unknown } }>( + `mutation CreateIssue($input: IssueCreateInput!) { + issueCreate(input: $input) { + issue { + ${ISSUE_FIELDS} + } + } + }`, + { input }, + ); + return mapIssue(data.issueCreate.issue as RawIssue); + }, + + async updateIssue(issueId: string, input: LinearUpdateIssueInput): Promise { + logger.debug('Updating Linear issue', { issueId }); + const data = await linearGraphQL<{ issueUpdate: { issue: unknown } }>( + `mutation UpdateIssue($id: String!, $input: IssueUpdateInput!) 
{ + issueUpdate(id: $id, input: $input) { + issue { + ${ISSUE_FIELDS} + } + } + }`, + { id: issueId, input }, + ); + return mapIssue(data.issueUpdate.issue as RawIssue); + }, + + async updateIssueState(issueId: string, stateId: string): Promise { + logger.debug('Updating Linear issue state', { issueId, stateId }); + return linearClient.updateIssue(issueId, { stateId }); + }, + + // ===== Comments ===== + + async getIssueComments(issueId: string): Promise { + logger.debug('Fetching Linear issue comments', { issueId }); + const data = await linearGraphQL<{ issue: { comments: { nodes: unknown[] } } }>( + `query GetIssueComments($id: String!) { + issue(id: $id) { + comments { + nodes { + ${COMMENT_FIELDS} + } + } + } + }`, + { id: issueId }, + ); + return (data.issue.comments.nodes as RawComment[]).map(mapComment); + }, + + async createComment(issueId: string, body: string): Promise { + logger.debug('Creating Linear comment', { issueId, bodyLength: body.length }); + const data = await linearGraphQL<{ commentCreate: { comment: unknown } }>( + `mutation CreateComment($input: CommentCreateInput!) { + commentCreate(input: $input) { + comment { + ${COMMENT_FIELDS} + } + } + }`, + { input: { issueId, body } }, + ); + return mapComment(data.commentCreate.comment as RawComment); + }, + + async updateComment(commentId: string, body: string): Promise { + logger.debug('Updating Linear comment', { commentId, bodyLength: body.length }); + const data = await linearGraphQL<{ commentUpdate: { comment: unknown } }>( + `mutation UpdateComment($id: String!, $input: CommentUpdateInput!) 
{ + commentUpdate(id: $id, input: $input) { + comment { + ${COMMENT_FIELDS} + } + } + }`, + { id: commentId, input: { body } }, + ); + return mapComment(data.commentUpdate.comment as RawComment); + }, + + async deleteComment(commentId: string): Promise { + logger.debug('Deleting Linear comment', { commentId }); + const data = await linearGraphQL<{ commentDelete: { success: boolean } }>( + `mutation DeleteComment($id: String!) { + commentDelete(id: $id) { + success + } + }`, + { id: commentId }, + ); + if (!data.commentDelete.success) { + throw new Error(`Linear API: failed to delete comment ${commentId}`); + } + }, + + // ===== Labels ===== + + async addLabel(issueId: string, labelId: string): Promise { + logger.debug('Adding label to Linear issue', { issueId, labelId }); + // NOTE: Linear's API has no atomic add-label endpoint, so we use a + // read-then-update pattern. This is subject to a TOCTOU race: two + // concurrent addLabel/removeLabel calls on the same issue can overwrite + // each other's changes. This is a known API limitation. + const issue = await linearClient.getIssue(issueId); + const currentLabelIds = issue.labels.map((l) => l.id); + if (currentLabelIds.includes(labelId)) { + return issue; + } + return linearClient.updateIssue(issueId, { labelIds: [...currentLabelIds, labelId] }); + }, + + async removeLabel(issueId: string, labelId: string): Promise { + logger.debug('Removing label from Linear issue', { issueId, labelId }); + // NOTE: Linear's API has no atomic remove-label endpoint, so we use a + // read-then-update pattern. This is subject to a TOCTOU race: two + // concurrent addLabel/removeLabel calls on the same issue can overwrite + // each other's changes. This is a known API limitation. 
+ const issue = await linearClient.getIssue(issueId); + const updatedLabelIds = issue.labels.map((l) => l.id).filter((id) => id !== labelId); + return linearClient.updateIssue(issueId, { labelIds: updatedLabelIds }); + }, + + // ===== Attachments ===== + + async getAttachments(issueId: string): Promise { + logger.debug('Fetching Linear attachments', { issueId }); + const data = await linearGraphQL<{ + issue: { + attachments: { + nodes: Array<{ + id?: string; + title?: string; + url?: string; + subtitle?: string | null; + metadata?: Record; + createdAt?: string; + updatedAt?: string; + }>; + }; + }; + }>( + `query GetAttachments($id: String!) { + issue(id: $id) { + attachments { + nodes { + id + title + url + subtitle + metadata + createdAt + updatedAt + } + } + } + }`, + { id: issueId }, + ); + return data.issue.attachments.nodes.map((a) => ({ + id: a.id ?? '', + title: a.title ?? '', + url: a.url ?? '', + subtitle: a.subtitle ?? null, + metadata: a.metadata ?? {}, + createdAt: a.createdAt ?? '', + updatedAt: a.updatedAt ?? '', + })); + }, + + async createAttachment( + issueId: string, + input: { title: string; url: string; subtitle?: string; metadata?: Record }, + ): Promise { + logger.debug('Creating Linear attachment', { issueId, title: input.title }); + const data = await linearGraphQL<{ + attachmentCreate: { + attachment: { + id?: string; + title?: string; + url?: string; + subtitle?: string | null; + metadata?: Record; + createdAt?: string; + updatedAt?: string; + }; + }; + }>( + `mutation CreateAttachment($input: AttachmentCreateInput!) { + attachmentCreate(input: $input) { + attachment { + id + title + url + subtitle + metadata + createdAt + updatedAt + } + } + }`, + { input: { issueId, ...input } }, + ); + const a = data.attachmentCreate.attachment; + return { + id: a.id ?? '', + title: a.title ?? '', + url: a.url ?? '', + subtitle: a.subtitle ?? null, + metadata: a.metadata ?? {}, + createdAt: a.createdAt ?? '', + updatedAt: a.updatedAt ?? 
'', + }; + }, + + // ===== Reactions ===== + + async createReaction(commentId: string, emoji: string): Promise { + logger.debug('Creating Linear reaction', { commentId, emoji }); + const data = await linearGraphQL<{ + reactionCreate: { + reaction: { + id?: string; + emoji?: string; + user?: { + id?: string; + name?: string; + email?: string; + displayName?: string; + avatarUrl?: string | null; + active?: boolean; + } | null; + createdAt?: string; + }; + }; + }>( + `mutation CreateReaction($input: ReactionCreateInput!) { + reactionCreate(input: $input) { + reaction { + id + emoji + createdAt + user { ${USER_FIELDS} } + } + } + }`, + { input: { commentId, emoji } }, + ); + const r = data.reactionCreate.reaction; + return { + id: r.id ?? '', + emoji: r.emoji ?? '', + user: mapUser(r.user), + createdAt: r.createdAt ?? '', + }; + }, + + // ===== User ===== + + async getMe(): Promise { + logger.debug('Fetching authenticated Linear user'); + const data = await linearGraphQL<{ viewer: unknown }>( + `query GetMe { + viewer { + ${USER_FIELDS} + } + }`, + ); + const user = mapUser(data.viewer as Parameters[0]); + if (!user) { + throw new Error('Linear viewer returned null'); + } + return user; + }, +}; diff --git a/src/linear/types.ts b/src/linear/types.ts new file mode 100644 index 00000000..e582eec4 --- /dev/null +++ b/src/linear/types.ts @@ -0,0 +1,132 @@ +export interface LinearCredentials { + apiKey: string; +} + +export interface LinearUser { + id: string; + name: string; + email: string; + displayName: string; + avatarUrl: string | null; + active: boolean; +} + +export interface LinearTeam { + id: string; + name: string; + key: string; + description: string | null; +} + +export interface LinearLabel { + id: string; + name: string; + color: string; + description: string | null; +} + +export interface LinearWorkflowState { + id: string; + name: string; + type: string; + color: string; +} + +export interface LinearIssue { + id: string; + identifier: string; + title: 
string; + description: string | null; + priority: number; + priorityLabel: string; + state: LinearWorkflowState; + team: LinearTeam; + assignee: LinearUser | null; + labels: LinearLabel[]; + url: string; + createdAt: string; + updatedAt: string; +} + +export interface LinearComment { + id: string; + body: string; + user: LinearUser | null; + createdAt: string; + updatedAt: string; + issueId: string; +} + +export interface LinearAttachment { + id: string; + title: string; + url: string; + subtitle: string | null; + metadata: Record; + createdAt: string; + updatedAt: string; +} + +export interface LinearReaction { + id: string; + emoji: string; + user: LinearUser | null; + createdAt: string; +} + +// Input types for mutations + +export interface LinearCreateIssueInput { + title: string; + description?: string; + teamId: string; + assigneeId?: string; + stateId?: string; + priority?: number; + labelIds?: string[]; +} + +export interface LinearUpdateIssueInput { + title?: string; + description?: string; + assigneeId?: string | null; + stateId?: string; + priority?: number; + labelIds?: string[]; +} + +// Webhook payload types + +export interface LinearWebhookIssueData { + id: string; + identifier: string; + title: string; + description?: string | null; + priority: number; + priorityLabel: string; + url: string; + teamId: string; + stateId: string; + assigneeId?: string | null; + labelIds: string[]; + createdAt: string; + updatedAt: string; +} + +export interface LinearWebhookCommentData { + id: string; + body: string; + issueId: string; + userId: string; + createdAt: string; + updatedAt: string; +} + +export interface LinearWebhookPayload { + action: 'create' | 'update' | 'remove'; + type: 'Issue' | 'Comment' | 'IssueLabel' | 'Reaction'; + organizationId: string; + webhookTimestamp: number; + data: LinearWebhookIssueData | LinearWebhookCommentData | Record; + url: string; +} From f14d2cccfb01d76c290613be9696ff1694c117c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Apr 2026 21:07:17 +0200 Subject: [PATCH 24/52] chore(deps): bump @hono/node-server from 1.19.12 to 1.19.13 (#1089) --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index a2a8d6ea..8894bc2d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,7 @@ "license": "MIT", "dependencies": { "@anthropic-ai/claude-agent-sdk": "^0.2.91", - "@hono/node-server": "^1.13.7", + "@hono/node-server": "^1.19.13", "@hono/trpc-server": "^0.4.2", "@llmist/cli": "^16.0.3", "@oclif/core": "^4.8.0", @@ -2097,9 +2097,9 @@ } }, "node_modules/@hono/node-server": { - "version": "1.19.12", - "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.12.tgz", - "integrity": "sha512-txsUW4SQ1iilgE0l9/e9VQWmELXifEFvmdA1j6WFh/aFPj99hIntrSsq/if0UWyGVkmrRPKA1wCeP+UCr1B9Uw==", + "version": "1.19.13", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.13.tgz", + "integrity": "sha512-TsQLe4i2gvoTtrHje625ngThGBySOgSK3Xo2XRYOdqGN1teR8+I7vchQC46uLJi8OF62YTYA3AhSpumtkhsaKQ==", "license": "MIT", "engines": { "node": ">=18.14.1" diff --git a/package.json b/package.json index 0e1aa8b0..f6bac3ec 100644 --- a/package.json +++ b/package.json @@ -55,7 +55,7 @@ "license": "MIT", "dependencies": { "@anthropic-ai/claude-agent-sdk": "^0.2.91", - "@hono/node-server": "^1.13.7", + "@hono/node-server": "^1.19.13", "@hono/trpc-server": "^0.4.2", "@llmist/cli": "^16.0.3", "@oclif/core": "^4.8.0", From ab4ad2c35f50f7a138845bc036cf0f037ff62b8e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Apr 2026 21:07:47 +0200 Subject: [PATCH 25/52] chore(deps): bump follow-redirects from 1.15.11 to 1.16.0 (#1095) --- package-lock.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/package-lock.json b/package-lock.json 
index 8894bc2d..ae33d45b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7188,7 +7188,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.11", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.16.0.tgz", + "integrity": "sha512-y5rN/uOsadFT/JfYwhxRS5R7Qce+g3zG97+JrtFZlC9klX/W5hD7iiLzScI4nZqUS7DNUdhPgw4xI8W2LuXlUw==", "funding": [ { "type": "individual", From 73c646c5754049701157ab7a1bd608bf3201cbb1 Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 21:31:44 +0200 Subject: [PATCH 26/52] feat(pm): add Linear PM provider adapter and integration (#1096) * feat(pm): add Linear PM provider adapter and integration * fix(linear): address review feedback on Linear PM provider adapter - Register LinearIntegration in bootstrap.ts following Trello/JIRA pattern - Fix addChecklistItem to pass parentId to createIssue() for proper sub-issue relationship - Add parentId to LinearCreateIssueInput type in linear/types.ts - Fix isSelfAuthored to use withLinearCredentials() with project credentials - Remove dead code from sendReaction (no-op with dead extraction logic) - Import canonical LinearConfig from pm/config.ts instead of redeclaring local type - Fix getPromptTerminology() to return issue/Linear for Linear projects - Fix getListIds() to read Linear statuses config for prompt context - Add Linear config to RouterProjectConfig and loadProjectConfig() Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- src/agents/prompts/index.ts | 2 +- src/agents/shared/promptContext.ts | 41 +- src/api/routers/webhooks/types.ts | 2 +- src/config/integrationRoles.ts | 15 +- src/config/schema.ts | 23 +- src/integrations/bootstrap.ts | 9 +- src/linear/types.ts | 1 + src/pm/config.ts | 26 ++ src/pm/index.ts | 1 + src/pm/linear/adapter.ts | 332 ++++++++++++++ src/pm/linear/integration.ts | 217 +++++++++ src/pm/types.ts | 2 +- src/router/config.ts | 13 +- 
tests/unit/pm/linear/adapter.test.ts | 536 +++++++++++++++++++++++ tests/unit/pm/linear/integration.test.ts | 419 ++++++++++++++++++ 15 files changed, 1618 insertions(+), 21 deletions(-) create mode 100644 src/pm/linear/adapter.ts create mode 100644 src/pm/linear/integration.ts create mode 100644 tests/unit/pm/linear/adapter.test.ts create mode 100644 tests/unit/pm/linear/integration.test.ts diff --git a/src/agents/prompts/index.ts b/src/agents/prompts/index.ts index 4f7b84b0..90ff853d 100644 --- a/src/agents/prompts/index.ts +++ b/src/agents/prompts/index.ts @@ -34,7 +34,7 @@ export interface PromptContext { projectId?: string; // PM vocabulary (computed from pmType) - pmType?: 'trello' | 'jira'; + pmType?: 'trello' | 'jira' | 'linear'; workItemNoun?: string; // "card" or "issue" workItemNounPlural?: string; // "cards" or "issues" workItemNounCap?: string; // "Card" or "Issue" diff --git a/src/agents/shared/promptContext.ts b/src/agents/shared/promptContext.ts index 0d7a7a64..1ef32ebf 100644 --- a/src/agents/shared/promptContext.ts +++ b/src/agents/shared/promptContext.ts @@ -1,4 +1,4 @@ -import { getJiraConfig, getTrelloConfig } from '../../pm/config.js'; +import { getJiraConfig, getLinearConfig, getTrelloConfig } from '../../pm/config.js'; import { getPMProviderOrNull } from '../../pm/index.js'; import type { ProjectConfig } from '../../types/index.js'; import type { PromptContext } from '../prompts/index.js'; @@ -6,29 +6,44 @@ import type { PromptContext } from '../prompts/index.js'; function getListIds(project: ProjectConfig) { const trelloConfig = getTrelloConfig(project); const jiraConfig = getJiraConfig(project); + const linearConfig = getLinearConfig(project); return { - backlogListId: trelloConfig?.lists?.backlog ?? jiraConfig?.statuses?.backlog, - todoListId: trelloConfig?.lists?.todo ?? jiraConfig?.statuses?.todo, - inProgressListId: trelloConfig?.lists?.inProgress ?? jiraConfig?.statuses?.inProgress, - inReviewListId: trelloConfig?.lists?.inReview ?? 
jiraConfig?.statuses?.inReview, - doneListId: trelloConfig?.lists?.done ?? jiraConfig?.statuses?.done, - mergedListId: trelloConfig?.lists?.merged ?? jiraConfig?.statuses?.merged, + backlogListId: + trelloConfig?.lists?.backlog ?? + jiraConfig?.statuses?.backlog ?? + linearConfig?.statuses?.backlog, + todoListId: + trelloConfig?.lists?.todo ?? jiraConfig?.statuses?.todo ?? linearConfig?.statuses?.todo, + inProgressListId: + trelloConfig?.lists?.inProgress ?? + jiraConfig?.statuses?.inProgress ?? + linearConfig?.statuses?.inProgress, + inReviewListId: + trelloConfig?.lists?.inReview ?? + jiraConfig?.statuses?.inReview ?? + linearConfig?.statuses?.inReview, + doneListId: + trelloConfig?.lists?.done ?? jiraConfig?.statuses?.done ?? linearConfig?.statuses?.done, + mergedListId: + trelloConfig?.lists?.merged ?? jiraConfig?.statuses?.merged ?? linearConfig?.statuses?.merged, debugListId: trelloConfig?.lists?.debug, processedLabelId: trelloConfig?.labels?.processed, - autoLabelId: trelloConfig?.labels?.auto ?? jiraConfig?.labels?.auto, + autoLabelId: + trelloConfig?.labels?.auto ?? jiraConfig?.labels?.auto ?? linearConfig?.labels?.auto, }; } function getPromptTerminology(pmType: string | undefined) { const isJira = pmType === 'jira'; + const isLinear = pmType === 'linear'; return { - workItemNoun: isJira ? 'issue' : 'card', - workItemNounPlural: isJira ? 'issues' : 'cards', - workItemNounCap: isJira ? 'Issue' : 'Card', - workItemNounPluralCap: isJira ? 'Issues' : 'Cards', - pmName: isJira ? 'JIRA' : 'Trello', + workItemNoun: isJira || isLinear ? 'issue' : 'card', + workItemNounPlural: isJira || isLinear ? 'issues' : 'cards', + workItemNounCap: isJira || isLinear ? 'Issue' : 'Card', + workItemNounPluralCap: isJira || isLinear ? 'Issues' : 'Cards', + pmName: isJira ? 'JIRA' : isLinear ? 
'Linear' : 'Trello', }; } diff --git a/src/api/routers/webhooks/types.ts b/src/api/routers/webhooks/types.ts index 323b3ccb..b97f83cd 100644 --- a/src/api/routers/webhooks/types.ts +++ b/src/api/routers/webhooks/types.ts @@ -36,7 +36,7 @@ export interface ProjectContext { projectId: string; orgId: string; repo?: string; - pmType: 'trello' | 'jira'; + pmType: 'trello' | 'jira' | 'linear'; boardId?: string; jiraBaseUrl?: string; jiraProjectKey?: string; diff --git a/src/config/integrationRoles.ts b/src/config/integrationRoles.ts index 124f53df..a79cd37d 100644 --- a/src/config/integrationRoles.ts +++ b/src/config/integrationRoles.ts @@ -1,5 +1,5 @@ export type IntegrationCategory = 'pm' | 'scm' | 'alerting'; -export type IntegrationProvider = 'trello' | 'jira' | 'github' | 'sentry'; +export type IntegrationProvider = 'trello' | 'jira' | 'linear' | 'github' | 'sentry'; export interface CredentialRoleDef { role: string; @@ -35,6 +35,18 @@ const _rolesRegistry = new Map([ }, ], ], + [ + 'linear', + [ + { role: 'api_key', label: 'API Key', envVarKey: 'LINEAR_API_KEY' }, + { + role: 'webhook_secret', + label: 'Webhook Secret', + envVarKey: 'LINEAR_WEBHOOK_SECRET', + optional: true, + }, + ], + ], [ 'github', [ @@ -69,6 +81,7 @@ const _rolesRegistry = new Map([ const _categoryRegistry = new Map([ ['trello', 'pm'], ['jira', 'pm'], + ['linear', 'pm'], ['github', 'scm'], ['sentry', 'alerting'], ]); diff --git a/src/config/schema.ts b/src/config/schema.ts index 60cbe368..8dcd3246 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -36,6 +36,25 @@ const JiraConfigSchema = z.object({ .optional(), }); +const LinearConfigSchema = z.object({ + teamId: z.string().min(1), + statuses: z.record(z.string()), // CASCADE status names → Linear state IDs + labels: z + .object({ + processing: z.string().optional(), + processed: z.string().optional(), + error: z.string().optional(), + readyToProcess: z.string().optional(), + auto: z.string().optional(), + }) + .optional(), + 
customFields: z + .object({ + cost: z.string().optional(), + }) + .optional(), +}); + export const ProjectConfigSchema = z.object({ id: z.string().min(1), orgId: z.string().min(1), @@ -49,7 +68,7 @@ export const ProjectConfigSchema = z.object({ pm: z .object({ - type: z.enum(['trello', 'jira']).default('trello'), + type: z.enum(['trello', 'jira', 'linear']).default('trello'), }) .default({ type: 'trello' }), @@ -68,6 +87,8 @@ export const ProjectConfigSchema = z.object({ jira: JiraConfigSchema.optional(), + linear: LinearConfigSchema.optional(), + model: z.string().default(PROJECT_DEFAULTS.model), agentModels: z.record(z.string()).optional(), maxIterations: z.number().int().positive().default(PROJECT_DEFAULTS.maxIterations), diff --git a/src/integrations/bootstrap.ts b/src/integrations/bootstrap.ts index dd93db4e..ce359692 100644 --- a/src/integrations/bootstrap.ts +++ b/src/integrations/bootstrap.ts @@ -1,9 +1,10 @@ /** * Unified integration bootstrap — canonical registration point for all integrations. 
* - * Registers all 4 built-in integrations into the `integrationRegistry`: + * Registers all 5 built-in integrations into the `integrationRegistry`: * - TrelloIntegration (PM) * - JiraIntegration (PM) + * - LinearIntegration (PM) * - GitHubSCMIntegration (SCM) * - SentryAlertingIntegration (Alerting) * @@ -26,6 +27,7 @@ import { GitHubSCMIntegration } from '../github/scm-integration.js'; import { integrationRegistry } from '../integrations/registry.js'; import { JiraIntegration } from '../pm/jira/integration.js'; +import { LinearIntegration } from '../pm/linear/integration.js'; import { pmRegistry } from '../pm/registry.js'; import { TrelloIntegration } from '../pm/trello/integration.js'; import { SentryAlertingIntegration } from '../sentry/alerting-integration.js'; @@ -40,6 +42,11 @@ if (!pmRegistry.getOrNull('jira')) { pmRegistry.register(jira); if (!integrationRegistry.getOrNull('jira')) integrationRegistry.register(jira); } +if (!pmRegistry.getOrNull('linear')) { + const linear = new LinearIntegration(); + pmRegistry.register(linear); + if (!integrationRegistry.getOrNull('linear')) integrationRegistry.register(linear); +} if (!integrationRegistry.getOrNull('github')) { integrationRegistry.register(new GitHubSCMIntegration()); } diff --git a/src/linear/types.ts b/src/linear/types.ts index e582eec4..806e852e 100644 --- a/src/linear/types.ts +++ b/src/linear/types.ts @@ -80,6 +80,7 @@ export interface LinearCreateIssueInput { title: string; description?: string; teamId: string; + parentId?: string; assigneeId?: string; stateId?: string; priority?: number; diff --git a/src/pm/config.ts b/src/pm/config.ts index 00f1c20e..048b15a1 100644 --- a/src/pm/config.ts +++ b/src/pm/config.ts @@ -53,6 +53,29 @@ export function getJiraConfig(project: ProjectConfig): JiraConfig | undefined { return project.jira as JiraConfig | undefined; } +/** Linear-specific configuration (from project_integrations JSONB) */ +export interface LinearConfig { + teamId: string; + statuses: 
Record; + labels?: { + processing?: string; + processed?: string; + error?: string; + readyToProcess?: string; + auto?: string; + }; + customFields?: { cost?: string }; +} + +/** + * Get the Linear config for a project. + * Returns the config or undefined if this is not a Linear project. + */ +export function getLinearConfig(project: ProjectConfig): LinearConfig | undefined { + if (project.pm?.type !== 'linear') return undefined; + return project.linear as LinearConfig | undefined; +} + /** * Get the cost custom field ID for a project, regardless of PM type. */ @@ -60,5 +83,8 @@ export function getCostFieldId(project: ProjectConfig): string | undefined { if (project.pm?.type === 'jira') { return getJiraConfig(project)?.customFields?.cost; } + if (project.pm?.type === 'linear') { + return getLinearConfig(project)?.customFields?.cost; + } return getTrelloConfig(project)?.customFields?.cost; } diff --git a/src/pm/index.ts b/src/pm/index.ts index b8364a76..e4a531c8 100644 --- a/src/pm/index.ts +++ b/src/pm/index.ts @@ -4,6 +4,7 @@ export type { PMIntegration, PMWebhookEvent } from './integration.js'; export { JiraPMProvider } from './jira/adapter.js'; export type { ProjectPMConfig } from './lifecycle.js'; export { hasAutoLabel, PMLifecycleManager, resolveProjectPMConfig } from './lifecycle.js'; +export { LinearPMProvider } from './linear/adapter.js'; export { extractMarkdownImages, filterImageMedia, diff --git a/src/pm/linear/adapter.ts b/src/pm/linear/adapter.ts new file mode 100644 index 00000000..fb7c1e61 --- /dev/null +++ b/src/pm/linear/adapter.ts @@ -0,0 +1,332 @@ +/** + * LinearPMProvider — wraps linearClient to implement the PMProvider interface. + * + * Assumes linearClient credentials are already in scope via withLinearCredentials(). + * + * Linear does not have native checklists. We model them using child issues + * (sub-issues), following the same pattern used by JiraPMProvider for subtasks. 
+ */ + +import { linearClient } from '../../linear/client.js'; +import { logger } from '../../utils/logging.js'; +import type { LinearConfig } from '../config.js'; +import type { + Attachment, + Checklist, + ChecklistItem, + CreateWorkItemConfig, + ListWorkItemsFilter, + PMProvider, + WorkItem, + WorkItemComment, + WorkItemLabel, +} from '../types.js'; + +export class LinearPMProvider implements PMProvider { + readonly type = 'linear' as const; + + constructor(private config: LinearConfig) {} + + async getWorkItem(id: string): Promise { + const issue = await linearClient.getIssue(id); + return { + id: issue.identifier || issue.id, + title: issue.title, + description: issue.description ?? '', + url: issue.url, + status: issue.state?.name, + labels: issue.labels.map( + (l): WorkItemLabel => ({ + id: l.id, + name: l.name, + color: l.color, + }), + ), + }; + } + + async getWorkItemComments(id: string): Promise { + const comments = await linearClient.getIssueComments(id); + return comments.map((c) => ({ + id: c.id, + date: c.createdAt, + text: c.body, + author: { + id: c.user?.id ?? '', + name: c.user?.displayName ?? c.user?.name ?? '', + username: c.user?.email ?? '', + }, + })); + } + + async updateWorkItem( + id: string, + updates: { title?: string; description?: string }, + ): Promise { + await linearClient.updateIssue(id, { + title: updates.title, + description: updates.description, + }); + } + + async addComment(id: string, text: string): Promise { + const comment = await linearClient.createComment(id, text); + return comment.id; + } + + async updateComment(_id: string, commentId: string, text: string): Promise { + await linearClient.updateComment(commentId, text); + } + + async createWorkItem(config: CreateWorkItemConfig): Promise { + const teamId = config.containerId || this.config.teamId; + const issue = await linearClient.createIssue({ + teamId, + title: config.title, + description: config.description, + ...(config.labels?.length + ? 
{ + labelIds: config.labels + .map((name) => (this.config.labels as Record | undefined)?.[name]) + .filter((id): id is string => !!id), + } + : {}), + }); + + // Transition to backlog status if configured + const backlogStatus = this.config.statuses?.backlog; + if (backlogStatus) { + try { + await this.moveWorkItem(issue.id, backlogStatus); + } catch (err) { + logger.warn('[Linear] Failed to transition new issue to backlog status', { + issueId: issue.id, + targetStatus: backlogStatus, + error: String(err), + }); + } + } + + return { + id: issue.identifier || issue.id, + title: issue.title, + description: issue.description ?? '', + url: issue.url, + labels: [], + }; + } + + async listWorkItems(containerId: string, filter?: ListWorkItemsFilter): Promise { + // containerId is the Linear team ID + const teamId = containerId || this.config.teamId; + const issues = await linearClient.listIssues({ + teamId, + ...(filter?.status + ? { + stateId: this.config.statuses?.[filter.status] ?? filter.status, + } + : {}), + }); + return issues.map((issue) => ({ + id: issue.identifier || issue.id, + title: issue.title, + description: issue.description ?? '', + url: issue.url, + status: issue.state?.name, + labels: issue.labels.map( + (l): WorkItemLabel => ({ + id: l.id, + name: l.name, + color: l.color, + }), + ), + })); + } + + async moveWorkItem(id: string, destination: string): Promise { + // destination is a Linear state name or ID from config.statuses + const stateId = this.config.statuses?.[destination] ?? destination; + await linearClient.updateIssueState(id, stateId); + } + + async addLabel(id: string, labelIdOrName: string): Promise { + // Resolve name → ID via config if possible + const labelId = + (this.config.labels as Record | undefined)?.[labelIdOrName] ?? labelIdOrName; + await linearClient.addLabel(id, labelId); + } + + async removeLabel(id: string, labelIdOrName: string): Promise { + const labelId = + (this.config.labels as Record | undefined)?.[labelIdOrName] ?? 
labelIdOrName; + await linearClient.removeLabel(id, labelId); + } + + async getChecklists(workItemId: string): Promise { + // Linear doesn't have native checklists — map child issues (sub-issues) + // We fetch the issue's children by listing issues with parentId filter. + // The linearClient doesn't expose a direct children query, so we use + // a workaround: list issues filtered by parent identifier. + // Since linearClient.listIssues() doesn't support parentId filter + // directly, we fall back to getting the issue and checking its + // children via the GraphQL API through getIssue() which doesn't + // return children. We'll use a workaround using the attachment/comment + // based "pseudo-checklist" pattern with a dedicated sub-issue list call. + // + // For now, use listIssues with a parent identifier approach: + // Linear's filter supports parent.id, but our client doesn't expose that. + // Return an empty list and rely on the item-level operations for now. + // This is consistent with how the JIRA implementation works for empty subtask lists. + logger.debug('[Linear] getChecklists — returning empty list (sub-issues not yet cached)', { + workItemId, + }); + return [ + { + id: `subtasks-${workItemId}`, + name: 'Sub-issues', + workItemId, + items: [] as ChecklistItem[], + }, + ]; + } + + async createChecklist(workItemId: string, name: string): Promise { + // In Linear, "create checklist" = create a parent context. + // Items will be sub-issues created via addChecklistItem. 
+ return { + id: `checklist-${workItemId}-${Date.now()}`, + name, + workItemId, + items: [], + }; + } + + async addChecklistItem( + checklistId: string, + name: string, + _checked = false, + description?: string, + ): Promise { + // Extract parent issue ID from checklistId format: + // "checklist--" or "subtasks-" + const match = checklistId.match(/(?:checklist|subtasks)-(.+?)(?:-\d{10,})?$/); + const parentId = match?.[1]; + if (!parentId) { + throw new Error(`Cannot extract parent issue ID from checklist ID: ${checklistId}`); + } + + await linearClient.createIssue({ + teamId: this.config.teamId, + title: name, + description, + parentId, + }); + logger.debug('[Linear] addChecklistItem — created sub-issue', { parentId, title: name }); + } + + async updateChecklistItem( + _workItemId: string, + checkItemId: string, + complete: boolean, + ): Promise { + // checkItemId is a Linear issue ID (sub-issue) + const targetStatus = complete + ? (this.config.statuses?.done ?? 'Done') + : (this.config.statuses?.backlog ?? 'Todo'); + await this.moveWorkItem(checkItemId, targetStatus); + } + + async deleteChecklistItem(_workItemId: string, checkItemId: string): Promise { + // Linear doesn't support issue deletion via API — transition to cancelled state + // We try to find a cancelled/done state and transition to it. + const cancelledStateId = this.config.statuses?.cancelled ?? this.config.statuses?.done ?? 
null; + + if (cancelledStateId) { + try { + await linearClient.updateIssueState(checkItemId, cancelledStateId); + logger.info('[Linear] deleteChecklistItem — transitioned sub-issue to terminal state', { + checkItemId, + stateId: cancelledStateId, + }); + return; + } catch (err) { + logger.warn('[Linear] Failed to transition sub-issue to terminal state', { + checkItemId, + error: String(err), + }); + } + } + + logger.warn('[Linear] deleteChecklistItem — no terminal state configured, skipping', { + checkItemId, + }); + } + + async getAttachments(workItemId: string): Promise { + const attachments = await linearClient.getAttachments(workItemId); + return attachments.map((a) => ({ + id: a.id, + name: a.title, + url: a.url, + mimeType: (a.metadata?.mimeType as string) ?? 'application/octet-stream', + bytes: (a.metadata?.size as number) ?? 0, + date: a.createdAt, + })); + } + + async addAttachment(workItemId: string, url: string, name: string): Promise { + await linearClient.createAttachment(workItemId, { title: name, url }); + } + + async addAttachmentFile( + workItemId: string, + _buffer: Buffer, + name: string, + _mimeType: string, + ): Promise { + // Linear doesn't support binary file uploads — add as a comment with a placeholder. + // This mirrors the JIRA addAttachment fallback for URL-only attachments. + await this.addComment(workItemId, `Attachment: ${name} (binary upload not supported)`); + } + + async getCustomFieldNumber(_workItemId: string, _fieldId: string): Promise { + // Linear doesn't have generic custom number fields. + // Return 0 as a safe fallback. + return 0; + } + + async updateCustomFieldNumber( + _workItemId: string, + _fieldId: string, + _value: number, + ): Promise { + // Linear doesn't have generic custom number fields — no-op. 
+ logger.warn('[Linear] updateCustomFieldNumber — not supported, skipping', { _fieldId }); + } + + async linkPR(workItemId: string, prUrl: string, prTitle: string): Promise { + await linearClient.createAttachment(workItemId, { + title: prTitle, + url: prUrl, + subtitle: 'Pull Request', + metadata: { type: 'github_pr' }, + }); + } + + getWorkItemUrl(id: string): string { + // Linear URLs follow pattern: https://linear.app/team/issue/TEAM-123 + // The id here may be the identifier (TEAM-123) or internal UUID. + // The issue.url from the API is already correct; for URL construction + // from an identifier alone we fall back to a generic format. + return `https://linear.app/issue/${id}`; + } + + async getAuthenticatedUser(): Promise<{ id: string; name: string; username: string }> { + const user = await linearClient.getMe(); + return { + id: user.id, + name: user.displayName || user.name, + username: user.email, + }; + } +} diff --git a/src/pm/linear/integration.ts b/src/pm/linear/integration.ts new file mode 100644 index 00000000..c6985eae --- /dev/null +++ b/src/pm/linear/integration.ts @@ -0,0 +1,217 @@ +/** + * LinearIntegration — implements PMIntegration for Linear. + * + * Encapsulates all Linear-specific concerns: credential resolution, + * webhook parsing, ack comments, reactions, project lookup, and work item ID + * extraction. + * + * Credential roles are self-registered at module load time via + * registerCredentialRoles(), so no changes to integrationRoles.ts are needed. 
+ */ + +import { + PROVIDER_CREDENTIAL_ROLES, + registerCredentialRoles, +} from '../../config/integrationRoles.js'; +import { getIntegrationCredential, getIntegrationCredentialOrNull } from '../../config/provider.js'; +import { getIntegrationProvider } from '../../db/repositories/credentialsRepository.js'; +import { withLinearCredentials } from '../../linear/client.js'; +import type { CascadeConfig, ProjectConfig } from '../../types/index.js'; +import { getLinearConfig } from '../config.js'; +import type { PMIntegration, PMWebhookEvent } from '../integration.js'; +import type { ProjectPMConfig } from '../lifecycle.js'; +import type { PMProvider } from '../types.js'; +import { LinearPMProvider } from './adapter.js'; + +// Self-register credential roles at module load time. +// This is idempotent — safe to call multiple times. +registerCredentialRoles('linear', 'pm', [ + { role: 'api_key', label: 'API Key', envVarKey: 'LINEAR_API_KEY' }, + { + role: 'webhook_secret', + label: 'Webhook Secret', + envVarKey: 'LINEAR_WEBHOOK_SECRET', + optional: true, + }, +]); + +// Linear issue identifier pattern: TEAM-123 +const LINEAR_ISSUE_KEY_REGEX = /\b([A-Z][A-Z0-9]+-\d+)\b/; + +export class LinearIntegration implements PMIntegration { + readonly type = 'linear'; + readonly category = 'pm' as const; + + async hasIntegration(projectId: string): Promise<boolean> { + const provider = await getIntegrationProvider(projectId, 'pm'); + if (provider !== 'linear') return false; + + const roles = PROVIDER_CREDENTIAL_ROLES.linear; + const requiredRoles = roles.filter((r) => !r.optional); + const values = await Promise.all( + requiredRoles.map((roleDef) => getIntegrationCredentialOrNull(projectId, 'pm', roleDef.role)), + ); + return values.every((v) => v !== null); + } + + createProvider(project: ProjectConfig): PMProvider { + const linearConfig = getLinearConfig(project); + if (!linearConfig?.teamId) { + throw new Error('Linear integration requires teamId in config'); + } + return new
LinearPMProvider(linearConfig); + } + + async withCredentials<T>(projectId: string, fn: () => Promise<T>): Promise<T> { + const apiKey = await getIntegrationCredential(projectId, 'pm', 'api_key'); + return withLinearCredentials({ apiKey }, fn); + } + + resolveLifecycleConfig(project: ProjectConfig): ProjectPMConfig { + const linearConfig = getLinearConfig(project); + const labels = linearConfig?.labels; + return { + labels: { + processing: labels?.processing ?? 'cascade-processing', + processed: labels?.processed ?? 'cascade-processed', + error: labels?.error ?? 'cascade-error', + readyToProcess: labels?.readyToProcess ?? 'cascade-ready', + auto: labels?.auto ?? 'cascade-auto', + }, + statuses: { + backlog: linearConfig?.statuses?.backlog, + inProgress: linearConfig?.statuses?.inProgress, + inReview: linearConfig?.statuses?.inReview, + done: linearConfig?.statuses?.done, + merged: linearConfig?.statuses?.merged, + }, + }; + } + + parseWebhookPayload(raw: unknown): PMWebhookEvent | null { + if (!raw || typeof raw !== 'object') return null; + const p = raw as Record<string, unknown>; + + // Linear webhook shape: { action, type, data, organizationId, ... } + const action = p.action as string | undefined; + const type = p.type as string | undefined; + if (typeof action !== 'string' || typeof type !== 'string') return null; + + const data = p.data as Record<string, unknown> | undefined; + if (!data) return null; + + // The event type is "<Type>.<action>" e.g. "Issue.create", "Comment.create" + const eventType = `${type}.${action}`; + + // For Issue events, data.teamId is the project identifier + // For Comment events, the issue identifier is in data.issue.identifier + let projectIdentifier: string | undefined; + let workItemId: string | undefined; + + if (type === 'Issue') { + projectIdentifier = data.teamId as string | undefined; + workItemId = (data.identifier as string | undefined) ??
(data.id as string | undefined); + } else if (type === 'Comment') { + const issue = data.issue as Record<string, unknown> | undefined; + projectIdentifier = issue?.teamId as string | undefined; + workItemId = (issue?.identifier as string | undefined) ?? (issue?.id as string | undefined); + } else { + // For other event types, try to find a teamId in data + projectIdentifier = data.teamId as string | undefined; + } + + if (!projectIdentifier) return null; + + return { + eventType, + projectIdentifier, + workItemId, + raw, + }; + } + + async isSelfAuthored(event: PMWebhookEvent, projectId: string): Promise<boolean> { + // For comment events, check if the comment was authored by the bot user. + // Linear comments have a userId in the data. + if (!event.eventType.startsWith('Comment.')) return false; + + const p = event.raw as Record<string, unknown>; + const data = p.data as Record<string, unknown> | undefined; + const commentUserId = data?.userId as string | undefined; + if (!commentUserId) return false; + + try { + // Get the authenticated user to compare — credentials must be in scope.
+ const apiKey = await getIntegrationCredential(projectId, 'pm', 'api_key'); + const { linearClient } = await import('../../linear/client.js'); + const me = await withLinearCredentials({ apiKey }, () => linearClient.getMe()); + return me.id === commentUserId; + } catch { + return false; + } + } + + async postAckComment( + projectId: string, + workItemId: string, + message: string, + ): Promise<string | null> { + try { + const apiKey = await getIntegrationCredential(projectId, 'pm', 'api_key'); + return await withLinearCredentials({ apiKey }, async () => { + const { linearClient } = await import('../../linear/client.js'); + const comment = await linearClient.createComment(workItemId, message); + return comment.id; + }); + } catch (err) { + const { logger } = await import('../../utils/logging.js'); + logger.warn('[Linear] Failed to post ack comment', { + projectId, + workItemId, + error: String(err), + }); + return null; + } + } + + async deleteAckComment(projectId: string, _workItemId: string, commentId: string): Promise<void> { + try { + const apiKey = await getIntegrationCredential(projectId, 'pm', 'api_key'); + await withLinearCredentials({ apiKey }, async () => { + const { linearClient } = await import('../../linear/client.js'); + await linearClient.deleteComment(commentId); + }); + } catch (err) { + const { logger } = await import('../../utils/logging.js'); + logger.warn('[Linear] Failed to delete ack comment', { + projectId, + commentId, + error: String(err), + }); + } + } + + async sendReaction(_projectId: string, _event: PMWebhookEvent): Promise<void> { + // Linear reactions require a dedicated API call with credentials. + // Reactions are optional in the PMIntegration interface — no-op for now. + } + + async lookupProject( + _identifier: string, + ): Promise<{ project: ProjectConfig; config: CascadeConfig } | null> { + // Linear project lookup by teamId is not yet implemented in the config + // repository (separate story). Return null to fall through to other lookup + // mechanisms.
+ return null; + } + + extractWorkItemId(text: string): string | null { + // Linear issue identifiers follow the same TEAM-123 pattern as JIRA. + // Also check Linear URLs: https://linear.app/org/issue/TEAM-123 + const urlMatch = text.match(/https:\/\/linear\.app\/[^/]+\/issue\/([A-Z][A-Z0-9]+-\d+)/); + if (urlMatch) return urlMatch[1]; + + const match = text.match(LINEAR_ISSUE_KEY_REGEX); + return match ? match[1] : null; + } +} diff --git a/src/pm/types.ts b/src/pm/types.ts index e4d3aa33..bc4e5308 100644 --- a/src/pm/types.ts +++ b/src/pm/types.ts @@ -3,7 +3,7 @@ * future project-management integrations must implement. */ -export type PMType = 'trello' | 'jira'; +export type PMType = 'trello' | 'jira' | 'linear'; /** * A reference to an inline media item (image, etc.) embedded in a work item diff --git a/src/router/config.ts b/src/router/config.ts index 83029e50..c20e5a23 100644 --- a/src/router/config.ts +++ b/src/router/config.ts @@ -1,12 +1,12 @@ import { loadConfig } from '../config/provider.js'; -import { getJiraConfig, getTrelloConfig } from '../pm/config.js'; +import { getJiraConfig, getLinearConfig, getTrelloConfig } from '../pm/config.js'; import type { CascadeConfig, ProjectConfig } from '../types/index.js'; // Minimal config types - what router needs for quick filtering export interface RouterProjectConfig { id: string; repo?: string; // owner/repo format (optional for projects without SCM integration) - pmType: 'trello' | 'jira'; + pmType: 'trello' | 'jira' | 'linear'; trello?: { boardId: string; lists: Record; @@ -16,6 +16,9 @@ export interface RouterProjectConfig { projectKey: string; baseUrl: string; }; + linear?: { + teamId: string; + }; } export interface RouterConfig { @@ -84,6 +87,7 @@ export async function loadProjectConfig(): Promise<{ projects: config.projects.map((p) => { const trelloConfig = getTrelloConfig(p); const jiraConfig = getJiraConfig(p); + const linearConfig = getLinearConfig(p); return { id: p.id, repo: p.repo, @@ -101,6 
+105,11 @@ export async function loadProjectConfig(): Promise<{ baseUrl: jiraConfig.baseUrl, }, }), + ...(linearConfig && { + linear: { + teamId: linearConfig.teamId, + }, + }), }; }), fullProjects: config.projects, diff --git a/tests/unit/pm/linear/adapter.test.ts b/tests/unit/pm/linear/adapter.test.ts new file mode 100644 index 00000000..552d2af2 --- /dev/null +++ b/tests/unit/pm/linear/adapter.test.ts @@ -0,0 +1,536 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// --------------------------------------------------------------------------- +// Mocks +// --------------------------------------------------------------------------- + +const mockGetIssue = vi.fn(); +const mockGetIssueComments = vi.fn(); +const mockCreateComment = vi.fn(); +const mockUpdateComment = vi.fn(); +const mockCreateIssue = vi.fn(); +const mockUpdateIssue = vi.fn(); +const mockUpdateIssueState = vi.fn(); +const mockListIssues = vi.fn(); +const mockAddLabel = vi.fn(); +const mockRemoveLabel = vi.fn(); +const mockGetAttachments = vi.fn(); +const mockCreateAttachment = vi.fn(); +const mockGetMe = vi.fn(); + +vi.mock('../../../../src/linear/client.js', () => ({ + linearClient: { + getIssue: (...args: unknown[]) => mockGetIssue(...args), + getIssueComments: (...args: unknown[]) => mockGetIssueComments(...args), + createComment: (...args: unknown[]) => mockCreateComment(...args), + updateComment: (...args: unknown[]) => mockUpdateComment(...args), + createIssue: (...args: unknown[]) => mockCreateIssue(...args), + updateIssue: (...args: unknown[]) => mockUpdateIssue(...args), + updateIssueState: (...args: unknown[]) => mockUpdateIssueState(...args), + listIssues: (...args: unknown[]) => mockListIssues(...args), + addLabel: (...args: unknown[]) => mockAddLabel(...args), + removeLabel: (...args: unknown[]) => mockRemoveLabel(...args), + getAttachments: (...args: unknown[]) => mockGetAttachments(...args), + createAttachment: (...args: unknown[]) => mockCreateAttachment(...args), 
+ getMe: (...args: unknown[]) => mockGetMe(...args), + }, +})); + +import { LinearPMProvider } from '../../../../src/pm/linear/adapter.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +const defaultConfig = { + teamId: 'team-abc', + statuses: { + backlog: 'state-backlog', + inProgress: 'state-in-progress', + inReview: 'state-in-review', + done: 'state-done', + merged: 'state-merged', + cancelled: 'state-cancelled', + }, + labels: { + processing: 'label-processing-id', + }, +}; + +function makeIssue(overrides: Record = {}) { + return { + id: 'issue-uuid', + identifier: 'TEAM-1', + title: 'Test Issue', + description: 'A description', + priority: 0, + priorityLabel: 'No priority', + state: { id: 'state-backlog', name: 'Backlog', type: 'backlog', color: '#ccc' }, + team: { id: 'team-abc', name: 'Team ABC', key: 'TEAM', description: null }, + assignee: null, + labels: [], + url: 'https://linear.app/org/issue/TEAM-1', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + ...overrides, + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('LinearPMProvider', () => { + let provider: LinearPMProvider; + + beforeEach(() => { + provider = new LinearPMProvider(defaultConfig); + vi.clearAllMocks(); + }); + + it('has type "linear"', () => { + expect(provider.type).toBe('linear'); + }); + + // ========================================================================= + // getWorkItem + // ========================================================================= + describe('getWorkItem', () => { + it('maps a Linear issue to a WorkItem', async () => { + mockGetIssue.mockResolvedValue( + makeIssue({ + labels: [{ id: 'label-1', name: 'Bug', color: '#f00', description: null }], + }), + ); + + const 
result = await provider.getWorkItem('issue-uuid'); + + expect(mockGetIssue).toHaveBeenCalledWith('issue-uuid'); + expect(result.id).toBe('TEAM-1'); // uses identifier + expect(result.title).toBe('Test Issue'); + expect(result.description).toBe('A description'); + expect(result.url).toBe('https://linear.app/org/issue/TEAM-1'); + expect(result.status).toBe('Backlog'); + expect(result.labels).toHaveLength(1); + expect(result.labels[0]).toEqual({ id: 'label-1', name: 'Bug', color: '#f00' }); + }); + + it('uses id when identifier is empty', async () => { + mockGetIssue.mockResolvedValue(makeIssue({ identifier: '' })); + const result = await provider.getWorkItem('issue-uuid'); + expect(result.id).toBe('issue-uuid'); + }); + + it('returns empty string for null description', async () => { + mockGetIssue.mockResolvedValue(makeIssue({ description: null })); + const result = await provider.getWorkItem('issue-uuid'); + expect(result.description).toBe(''); + }); + }); + + // ========================================================================= + // getWorkItemComments + // ========================================================================= + describe('getWorkItemComments', () => { + it('maps Linear comments to WorkItemComment[]', async () => { + mockGetIssueComments.mockResolvedValue([ + { + id: 'c1', + body: 'Hello world', + createdAt: '2024-01-02T00:00:00Z', + updatedAt: '2024-01-02T00:00:00Z', + issueId: 'issue-uuid', + user: { + id: 'u1', + name: 'Alice', + email: 'alice@example.com', + displayName: 'Alice Smith', + avatarUrl: null, + active: true, + }, + }, + ]); + + const result = await provider.getWorkItemComments('issue-uuid'); + + expect(result).toHaveLength(1); + expect(result[0].id).toBe('c1'); + expect(result[0].text).toBe('Hello world'); + expect(result[0].author.id).toBe('u1'); + expect(result[0].author.name).toBe('Alice Smith'); + expect(result[0].author.username).toBe('alice@example.com'); + }); + + it('handles comments with no user', async () => { + 
mockGetIssueComments.mockResolvedValue([ + { + id: 'c2', + body: 'Bot comment', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + issueId: 'issue-uuid', + user: null, + }, + ]); + + const result = await provider.getWorkItemComments('issue-uuid'); + + expect(result[0].author.id).toBe(''); + expect(result[0].author.name).toBe(''); + expect(result[0].author.username).toBe(''); + }); + }); + + // ========================================================================= + // updateWorkItem + // ========================================================================= + describe('updateWorkItem', () => { + it('calls updateIssue with title and description', async () => { + mockUpdateIssue.mockResolvedValue(makeIssue()); + await provider.updateWorkItem('issue-uuid', { title: 'New title', description: 'New desc' }); + expect(mockUpdateIssue).toHaveBeenCalledWith('issue-uuid', { + title: 'New title', + description: 'New desc', + }); + }); + }); + + // ========================================================================= + // addComment + // ========================================================================= + describe('addComment', () => { + it('creates a comment and returns its id', async () => { + mockCreateComment.mockResolvedValue({ id: 'comment-new', body: 'hi' }); + const result = await provider.addComment('issue-uuid', 'hi there'); + expect(mockCreateComment).toHaveBeenCalledWith('issue-uuid', 'hi there'); + expect(result).toBe('comment-new'); + }); + }); + + // ========================================================================= + // updateComment + // ========================================================================= + describe('updateComment', () => { + it('updates comment by commentId (not issueId)', async () => { + mockUpdateComment.mockResolvedValue({ id: 'c1', body: 'updated' }); + await provider.updateComment('issue-uuid', 'c1', 'updated body'); + expect(mockUpdateComment).toHaveBeenCalledWith('c1', 'updated 
body'); + }); + }); + + // ========================================================================= + // createWorkItem + // ========================================================================= + describe('createWorkItem', () => { + it('creates an issue in the given team', async () => { + mockCreateIssue.mockResolvedValue(makeIssue({ identifier: 'TEAM-2', title: 'New Story' })); + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + const result = await provider.createWorkItem({ + containerId: 'team-abc', + title: 'New Story', + description: 'A story', + }); + + expect(mockCreateIssue).toHaveBeenCalledWith( + expect.objectContaining({ teamId: 'team-abc', title: 'New Story' }), + ); + expect(result.id).toBe('TEAM-2'); + expect(result.title).toBe('New Story'); + }); + + it('falls back to config teamId when containerId is empty', async () => { + mockCreateIssue.mockResolvedValue(makeIssue()); + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.createWorkItem({ containerId: '', title: 'Test' }); + + expect(mockCreateIssue).toHaveBeenCalledWith(expect.objectContaining({ teamId: 'team-abc' })); + }); + + it('transitions to backlog status after creation', async () => { + mockCreateIssue.mockResolvedValue(makeIssue()); + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.createWorkItem({ containerId: 'team-abc', title: 'Test' }); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('issue-uuid', 'state-backlog'); + }); + }); + + // ========================================================================= + // listWorkItems + // ========================================================================= + describe('listWorkItems', () => { + it('lists issues for a team', async () => { + mockListIssues.mockResolvedValue([makeIssue(), makeIssue({ identifier: 'TEAM-2' })]); + + const result = await provider.listWorkItems('team-abc'); + + expect(mockListIssues).toHaveBeenCalledWith(expect.objectContaining({ teamId: 'team-abc' 
})); + expect(result).toHaveLength(2); + }); + }); + + // ========================================================================= + // moveWorkItem + // ========================================================================= + describe('moveWorkItem', () => { + it('resolves status name to state ID from config', async () => { + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.moveWorkItem('issue-uuid', 'done'); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('issue-uuid', 'state-done'); + }); + + it('passes destination directly when not in config', async () => { + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.moveWorkItem('issue-uuid', 'unknown-state-id'); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('issue-uuid', 'unknown-state-id'); + }); + }); + + // ========================================================================= + // addLabel / removeLabel + // ========================================================================= + describe('addLabel', () => { + it('resolves label name to ID from config', async () => { + mockAddLabel.mockResolvedValue(makeIssue()); + + await provider.addLabel('issue-uuid', 'processing'); + + expect(mockAddLabel).toHaveBeenCalledWith('issue-uuid', 'label-processing-id'); + }); + + it('passes label ID directly when not in config', async () => { + mockAddLabel.mockResolvedValue(makeIssue()); + + await provider.addLabel('issue-uuid', 'raw-label-id'); + + expect(mockAddLabel).toHaveBeenCalledWith('issue-uuid', 'raw-label-id'); + }); + }); + + describe('removeLabel', () => { + it('resolves label name to ID from config', async () => { + mockRemoveLabel.mockResolvedValue(makeIssue()); + + await provider.removeLabel('issue-uuid', 'processing'); + + expect(mockRemoveLabel).toHaveBeenCalledWith('issue-uuid', 'label-processing-id'); + }); + }); + + // ========================================================================= + // getChecklists + // 
========================================================================= + describe('getChecklists', () => { + it('returns a placeholder checklist', async () => { + const result = await provider.getChecklists('issue-uuid'); + expect(result).toHaveLength(1); + expect(result[0].id).toBe('subtasks-issue-uuid'); + expect(result[0].name).toBe('Sub-issues'); + expect(result[0].workItemId).toBe('issue-uuid'); + expect(result[0].items).toEqual([]); + }); + }); + + // ========================================================================= + // createChecklist + // ========================================================================= + describe('createChecklist', () => { + it('returns a synthetic checklist object', async () => { + const result = await provider.createChecklist('issue-uuid', 'Acceptance Criteria'); + expect(result.workItemId).toBe('issue-uuid'); + expect(result.name).toBe('Acceptance Criteria'); + expect(result.id).toMatch(/^checklist-issue-uuid-\d+$/); + expect(result.items).toEqual([]); + }); + }); + + // ========================================================================= + // addChecklistItem + // ========================================================================= + describe('addChecklistItem', () => { + it('creates a sub-issue when parent ID is extractable', async () => { + mockCreateIssue.mockResolvedValue(makeIssue()); + + await provider.addChecklistItem('subtasks-issue-uuid', 'Sub-task 1'); + + expect(mockCreateIssue).toHaveBeenCalledWith( + expect.objectContaining({ title: 'Sub-task 1', teamId: 'team-abc' }), + ); + }); + + it('throws when checklistId has no extractable parent', async () => { + await expect(provider.addChecklistItem('invalid-id', 'Sub-task')).rejects.toThrow( + 'Cannot extract parent issue ID from checklist ID: invalid-id', + ); + }); + }); + + // ========================================================================= + // updateChecklistItem + // 
========================================================================= + describe('updateChecklistItem', () => { + it('transitions sub-issue to done state when complete=true', async () => { + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.updateChecklistItem('parent-uuid', 'sub-uuid', true); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('sub-uuid', 'state-done'); + }); + + it('transitions sub-issue to backlog state when complete=false', async () => { + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.updateChecklistItem('parent-uuid', 'sub-uuid', false); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('sub-uuid', 'state-backlog'); + }); + }); + + // ========================================================================= + // deleteChecklistItem + // ========================================================================= + describe('deleteChecklistItem', () => { + it('transitions to cancelled state when configured', async () => { + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await provider.deleteChecklistItem('parent-uuid', 'sub-uuid'); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('sub-uuid', 'state-cancelled'); + }); + + it('falls back to done state when no cancelled state configured', async () => { + const providerNoCancelled = new LinearPMProvider({ + teamId: 'team-abc', + statuses: { done: 'state-done' }, + }); + mockUpdateIssueState.mockResolvedValue(makeIssue()); + + await providerNoCancelled.deleteChecklistItem('parent-uuid', 'sub-uuid'); + + expect(mockUpdateIssueState).toHaveBeenCalledWith('sub-uuid', 'state-done'); + }); + }); + + // ========================================================================= + // getAttachments + // ========================================================================= + describe('getAttachments', () => { + it('maps Linear attachments to Attachment[]', async () => { + mockGetAttachments.mockResolvedValue([ + { + id: 'att-1', + 
title: 'Screenshot', + url: 'https://storage.linear.app/att-1', + subtitle: null, + metadata: { mimeType: 'image/png', size: 12345 }, + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + }, + ]); + + const result = await provider.getAttachments('issue-uuid'); + + expect(result).toHaveLength(1); + expect(result[0].id).toBe('att-1'); + expect(result[0].name).toBe('Screenshot'); + expect(result[0].url).toBe('https://storage.linear.app/att-1'); + expect(result[0].mimeType).toBe('image/png'); + expect(result[0].bytes).toBe(12345); + }); + }); + + // ========================================================================= + // addAttachment + // ========================================================================= + describe('addAttachment', () => { + it('creates an attachment link', async () => { + mockCreateAttachment.mockResolvedValue({ id: 'att-new' }); + + await provider.addAttachment('issue-uuid', 'https://example.com/file.pdf', 'Report'); + + expect(mockCreateAttachment).toHaveBeenCalledWith('issue-uuid', { + title: 'Report', + url: 'https://example.com/file.pdf', + }); + }); + }); + + // ========================================================================= + // linkPR + // ========================================================================= + describe('linkPR', () => { + it('creates an attachment for the PR', async () => { + mockCreateAttachment.mockResolvedValue({ id: 'att-pr' }); + + await provider.linkPR( + 'issue-uuid', + 'https://github.com/org/repo/pull/42', + 'feat: add linear', + ); + + expect(mockCreateAttachment).toHaveBeenCalledWith('issue-uuid', { + title: 'feat: add linear', + url: 'https://github.com/org/repo/pull/42', + subtitle: 'Pull Request', + metadata: { type: 'github_pr' }, + }); + }); + }); + + // ========================================================================= + // getWorkItemUrl + // ========================================================================= + describe('getWorkItemUrl', () => { + 
it('constructs a Linear issue URL', () => { + expect(provider.getWorkItemUrl('TEAM-123')).toBe('https://linear.app/issue/TEAM-123'); + }); + }); + + // ========================================================================= + // getAuthenticatedUser + // ========================================================================= + describe('getAuthenticatedUser', () => { + it('returns the authenticated user', async () => { + mockGetMe.mockResolvedValue({ + id: 'user-bot', + name: 'Bot User', + email: 'bot@example.com', + displayName: 'Cascade Bot', + avatarUrl: null, + active: true, + }); + + const user = await provider.getAuthenticatedUser(); + + expect(user.id).toBe('user-bot'); + expect(user.name).toBe('Cascade Bot'); // prefers displayName + expect(user.username).toBe('bot@example.com'); + }); + }); + + // ========================================================================= + // getCustomFieldNumber / updateCustomFieldNumber + // ========================================================================= + describe('getCustomFieldNumber', () => { + it('returns 0 (not supported)', async () => { + const result = await provider.getCustomFieldNumber('issue-uuid', 'custom-field'); + expect(result).toBe(0); + }); + }); + + describe('updateCustomFieldNumber', () => { + it('is a no-op (not supported)', async () => { + // Should not throw + await expect( + provider.updateCustomFieldNumber('issue-uuid', 'custom-field', 42), + ).resolves.toBeUndefined(); + }); + }); +}); diff --git a/tests/unit/pm/linear/integration.test.ts b/tests/unit/pm/linear/integration.test.ts new file mode 100644 index 00000000..9b5df82d --- /dev/null +++ b/tests/unit/pm/linear/integration.test.ts @@ -0,0 +1,419 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// --------------------------------------------------------------------------- +// Mocks +// --------------------------------------------------------------------------- + +const mockGetIntegrationCredential = vi.fn(); 
+const mockGetIntegrationCredentialOrNull = vi.fn(); + +vi.mock('../../../../src/config/provider.js', () => ({ + getIntegrationCredential: (...args: unknown[]) => mockGetIntegrationCredential(...args), + getIntegrationCredentialOrNull: (...args: unknown[]) => + mockGetIntegrationCredentialOrNull(...args), +})); + +const mockGetIntegrationProvider = vi.fn(); +vi.mock('../../../../src/db/repositories/credentialsRepository.js', () => ({ + getIntegrationProvider: (...args: unknown[]) => mockGetIntegrationProvider(...args), +})); + +const mockWithLinearCredentials = vi.fn().mockImplementation((_creds, fn) => fn()); +vi.mock('../../../../src/linear/client.js', () => ({ + withLinearCredentials: (...args: unknown[]) => mockWithLinearCredentials(...args), + linearClient: { + getMe: vi.fn().mockResolvedValue({ id: 'user-bot', name: 'Bot', email: 'bot@example.com' }), + createComment: vi.fn().mockResolvedValue({ id: 'comment-id', body: 'msg' }), + deleteComment: vi.fn().mockResolvedValue(undefined), + }, +})); + +const mockGetLinearConfig = vi.fn(); +vi.mock('../../../../src/pm/config.js', () => ({ + getLinearConfig: (...args: unknown[]) => mockGetLinearConfig(...args), +})); + +// Must mock registerCredentialRoles to avoid side effects in tests +vi.mock('../../../../src/config/integrationRoles.js', () => ({ + PROVIDER_CREDENTIAL_ROLES: new Proxy( + {}, + { + get(_target, prop: string) { + if (prop === 'linear') { + return [ + { role: 'api_key', label: 'API Key', envVarKey: 'LINEAR_API_KEY' }, + { + role: 'webhook_secret', + label: 'Webhook Secret', + envVarKey: 'LINEAR_WEBHOOK_SECRET', + optional: true, + }, + ]; + } + return []; + }, + }, + ), + registerCredentialRoles: vi.fn(), +})); + +import { LinearIntegration } from '../../../../src/pm/linear/integration.js'; +import type { ProjectConfig } from '../../../../src/types/index.js'; + +// --------------------------------------------------------------------------- +// Helpers +// 
--------------------------------------------------------------------------- + +function makeProject(overrides: Partial = {}): ProjectConfig { + return { + id: 'proj-1', + orgId: 'org-1', + name: 'Test Linear Project', + repo: 'owner/repo', + baseBranch: 'main', + branchPrefix: 'feature/', + pm: { type: 'linear' }, + ...overrides, + } as ProjectConfig; +} + +function makeLinearConfig(overrides: Record = {}) { + return { + teamId: 'team-abc', + statuses: { + backlog: 'state-backlog', + inProgress: 'state-in-progress', + inReview: 'state-in-review', + done: 'state-done', + merged: 'state-merged', + }, + labels: { + processing: 'cascade-processing', + processed: 'cascade-processed', + error: 'cascade-error', + readyToProcess: 'cascade-ready', + auto: 'cascade-auto', + }, + ...overrides, + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('LinearIntegration', () => { + let integration: LinearIntegration; + + beforeEach(() => { + integration = new LinearIntegration(); + mockGetLinearConfig.mockReturnValue(makeLinearConfig()); + vi.clearAllMocks(); + mockGetLinearConfig.mockReturnValue(makeLinearConfig()); + mockWithLinearCredentials.mockImplementation((_creds, fn) => fn()); + }); + + it('has type "linear"', () => { + expect(integration.type).toBe('linear'); + }); + + it('has category "pm"', () => { + expect(integration.category).toBe('pm'); + }); + + // ========================================================================= + // hasIntegration + // ========================================================================= + describe('hasIntegration', () => { + it('returns false when PM provider is not linear', async () => { + mockGetIntegrationProvider.mockResolvedValue(null); + + const result = await integration.hasIntegration('proj-1'); + + expect(result).toBe(false); + 
expect(mockGetIntegrationCredentialOrNull).not.toHaveBeenCalled(); + }); + + it('returns false when PM provider is trello (not linear)', async () => { + mockGetIntegrationProvider.mockResolvedValue('trello'); + + const result = await integration.hasIntegration('proj-1'); + + expect(result).toBe(false); + }); + + it('returns true when provider is linear and required credentials are present', async () => { + mockGetIntegrationProvider.mockResolvedValue('linear'); + // LINEAR_API_KEY is required; LINEAR_WEBHOOK_SECRET is optional + mockGetIntegrationCredentialOrNull.mockResolvedValueOnce('lin_api_key_xxx'); + + const result = await integration.hasIntegration('proj-1'); + + expect(result).toBe(true); + }); + + it('returns false when api_key is missing', async () => { + mockGetIntegrationProvider.mockResolvedValue('linear'); + mockGetIntegrationCredentialOrNull.mockResolvedValueOnce(null); + + const result = await integration.hasIntegration('proj-1'); + + expect(result).toBe(false); + }); + }); + + // ========================================================================= + // createProvider + // ========================================================================= + describe('createProvider', () => { + it('returns a LinearPMProvider instance when teamId is present', () => { + const project = makeProject(); + const provider = integration.createProvider(project); + expect(provider).toBeDefined(); + expect(provider.type).toBe('linear'); + }); + + it('throws when linear config has no teamId', () => { + mockGetLinearConfig.mockReturnValue({ statuses: {} }); // no teamId + const project = makeProject(); + expect(() => integration.createProvider(project)).toThrow( + 'Linear integration requires teamId in config', + ); + }); + + it('throws when linear config is undefined', () => { + mockGetLinearConfig.mockReturnValue(undefined); + const project = makeProject(); + expect(() => integration.createProvider(project)).toThrow( + 'Linear integration requires teamId in config', 
+ ); + }); + }); + + // ========================================================================= + // withCredentials + // ========================================================================= + describe('withCredentials', () => { + it('fetches api_key and calls withLinearCredentials', async () => { + mockGetIntegrationCredential.mockResolvedValueOnce('lin_api_key_xxx'); + + const fn = vi.fn().mockResolvedValue('done'); + const result = await integration.withCredentials('proj-1', fn); + + expect(mockGetIntegrationCredential).toHaveBeenCalledWith('proj-1', 'pm', 'api_key'); + expect(mockWithLinearCredentials).toHaveBeenCalledWith({ apiKey: 'lin_api_key_xxx' }, fn); + expect(result).toBe('done'); + }); + }); + + // ========================================================================= + // resolveLifecycleConfig + // ========================================================================= + describe('resolveLifecycleConfig', () => { + it('maps linear labels and statuses to lifecycle config', () => { + const project = makeProject(); + const config = integration.resolveLifecycleConfig(project); + + expect(config.labels.processing).toBe('cascade-processing'); + expect(config.labels.processed).toBe('cascade-processed'); + expect(config.labels.error).toBe('cascade-error'); + expect(config.labels.readyToProcess).toBe('cascade-ready'); + expect(config.labels.auto).toBe('cascade-auto'); + expect(config.statuses.backlog).toBe('state-backlog'); + expect(config.statuses.inProgress).toBe('state-in-progress'); + expect(config.statuses.done).toBe('state-done'); + }); + + it('uses defaults when labels config is missing', () => { + mockGetLinearConfig.mockReturnValue({ teamId: 'team-abc', statuses: {} }); + const project = makeProject(); + const config = integration.resolveLifecycleConfig(project); + + expect(config.labels.processing).toBe('cascade-processing'); + expect(config.labels.processed).toBe('cascade-processed'); + 
expect(config.labels.readyToProcess).toBe('cascade-ready'); + }); + + it('has undefined statuses when linear config has no statuses', () => { + mockGetLinearConfig.mockReturnValue({ teamId: 'team-abc', statuses: {} }); + const project = makeProject(); + const config = integration.resolveLifecycleConfig(project); + + expect(config.statuses.backlog).toBeUndefined(); + }); + }); + + // ========================================================================= + // parseWebhookPayload + // ========================================================================= + describe('parseWebhookPayload', () => { + it('returns null when payload is null', () => { + expect(integration.parseWebhookPayload(null)).toBeNull(); + }); + + it('returns null when payload is not an object', () => { + expect(integration.parseWebhookPayload('string')).toBeNull(); + }); + + it('returns null when action or type is missing', () => { + expect(integration.parseWebhookPayload({ action: 'create' })).toBeNull(); + expect(integration.parseWebhookPayload({ type: 'Issue' })).toBeNull(); + }); + + it('returns null when data is missing', () => { + const raw = { action: 'create', type: 'Issue' }; + expect(integration.parseWebhookPayload(raw)).toBeNull(); + }); + + it('returns null when projectIdentifier is missing', () => { + const raw = { + action: 'create', + type: 'Issue', + data: { identifier: 'TEAM-1' }, // no teamId + }; + expect(integration.parseWebhookPayload(raw)).toBeNull(); + }); + + it('parses an Issue.create payload', () => { + const raw = { + action: 'create', + type: 'Issue', + organizationId: 'org-123', + data: { + id: 'issue-uuid', + identifier: 'TEAM-123', + teamId: 'team-abc', + }, + }; + + const result = integration.parseWebhookPayload(raw); + + expect(result).not.toBeNull(); + expect(result?.eventType).toBe('Issue.create'); + expect(result?.projectIdentifier).toBe('team-abc'); + expect(result?.workItemId).toBe('TEAM-123'); + expect(result?.raw).toBe(raw); + }); + + it('parses an 
Issue.update payload', () => { + const raw = { + action: 'update', + type: 'Issue', + data: { + id: 'issue-uuid', + identifier: 'ENG-456', + teamId: 'team-xyz', + }, + }; + + const result = integration.parseWebhookPayload(raw); + + expect(result?.eventType).toBe('Issue.update'); + expect(result?.projectIdentifier).toBe('team-xyz'); + expect(result?.workItemId).toBe('ENG-456'); + }); + + it('parses a Comment.create payload', () => { + const raw = { + action: 'create', + type: 'Comment', + data: { + id: 'comment-uuid', + body: 'Hello', + userId: 'user-123', + issue: { + id: 'issue-uuid', + identifier: 'TEAM-7', + teamId: 'team-abc', + }, + }, + }; + + const result = integration.parseWebhookPayload(raw); + + expect(result?.eventType).toBe('Comment.create'); + expect(result?.projectIdentifier).toBe('team-abc'); + expect(result?.workItemId).toBe('TEAM-7'); + }); + }); + + // ========================================================================= + // isSelfAuthored + // ========================================================================= + describe('isSelfAuthored', () => { + it('returns false for non-comment events', async () => { + const event = { + eventType: 'Issue.update', + projectIdentifier: 'team-abc', + raw: {}, + }; + const result = await integration.isSelfAuthored(event, 'proj-1'); + expect(result).toBe(false); + }); + + it('returns false when comment has no userId', async () => { + const event = { + eventType: 'Comment.create', + projectIdentifier: 'team-abc', + raw: { data: {} }, + }; + const result = await integration.isSelfAuthored(event, 'proj-1'); + expect(result).toBe(false); + }); + }); + + // ========================================================================= + // lookupProject + // ========================================================================= + describe('lookupProject', () => { + it('returns null (not yet implemented)', async () => { + const result = await integration.lookupProject('team-abc'); + expect(result).toBeNull(); + 
}); + }); + + // ========================================================================= + // extractWorkItemId + // ========================================================================= + describe('extractWorkItemId', () => { + it('extracts Linear issue identifier from text', () => { + expect(integration.extractWorkItemId('Working on TEAM-123 today')).toBe('TEAM-123'); + }); + + it('extracts issue identifier from Linear URL', () => { + expect( + integration.extractWorkItemId('See https://linear.app/myorg/issue/ENG-42 for details'), + ).toBe('ENG-42'); + }); + + it('extracts from PR body with Linear URL', () => { + expect( + integration.extractWorkItemId( + 'Fixes https://linear.app/acme/issue/ACME-999\n\nImplementation details...', + ), + ).toBe('ACME-999'); + }); + + it('extracts using text pattern when no URL', () => { + expect(integration.extractWorkItemId('Refs ABC-42')).toBe('ABC-42'); + }); + + it('returns null when no identifier found', () => { + expect(integration.extractWorkItemId('No issue reference here')).toBeNull(); + }); + + it('returns null for lowercase issue references', () => { + expect(integration.extractWorkItemId('team-123 is lowercase')).toBeNull(); + }); + + it('matches multi-letter team keys', () => { + expect(integration.extractWorkItemId('MYTEAM-999')).toBe('MYTEAM-999'); + }); + + it('prefers URL match over text match', () => { + expect( + integration.extractWorkItemId('URL: https://linear.app/org/issue/FRONT-10 text: BACK-20'), + ).toBe('FRONT-10'); + }); + }); +}); From aa0c4f745f18378752e0488d16a5c91879d3e3ba Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 21:45:56 +0200 Subject: [PATCH 27/52] feat(linear): register Linear in bootstrap and add DB lookup functions (#1097) Co-authored-by: Cascade Bot --- src/config/configCache.ts | 11 ++++ src/config/provider.ts | 19 ++++++ src/db/repositories/configMapper.ts | 50 +++++++++++++++- src/db/repositories/configRepository.ts | 23 ++++++- tests/unit/config/provider.test.ts 
| 41 +++++++++++++ .../unit/db/repositories/configMapper.test.ts | 46 ++++++++++++++ .../db/repositories/configRepository.test.ts | 60 +++++++++++++++++++ 7 files changed, 247 insertions(+), 3 deletions(-) diff --git a/src/config/configCache.ts b/src/config/configCache.ts index 8aa5b643..d9075617 100644 --- a/src/config/configCache.ts +++ b/src/config/configCache.ts @@ -12,6 +12,7 @@ class ConfigCache { private projectByBoardId = new Map>(); private projectByRepo = new Map>(); private projectByJiraKey = new Map>(); + private projectByLinearTeamId = new Map>(); private orgIdByProject = new Map>(); private ttlMs: number; @@ -62,6 +63,15 @@ class ConfigCache { this.projectByJiraKey.set(projectKey, this.makeEntry(project)); } + getProjectByLinearTeamId(teamId: string): ProjectConfig | undefined | null { + const entry = this.projectByLinearTeamId.get(teamId); + return this.isValid(entry) ? entry.data : null; + } + + setProjectByLinearTeamId(teamId: string, project: ProjectConfig | undefined): void { + this.projectByLinearTeamId.set(teamId, this.makeEntry(project)); + } + getOrgIdForProject(projectId: string): string | null { const entry = this.orgIdByProject.get(projectId); return this.isValid(entry) ? 
entry.data : null; @@ -76,6 +86,7 @@ class ConfigCache { this.projectByBoardId.clear(); this.projectByRepo.clear(); this.projectByJiraKey.clear(); + this.projectByLinearTeamId.clear(); this.orgIdByProject.clear(); } } diff --git a/src/config/provider.ts b/src/config/provider.ts index d26f9a7d..437e142e 100644 --- a/src/config/provider.ts +++ b/src/config/provider.ts @@ -2,10 +2,12 @@ import { findProjectByBoardIdFromDb, findProjectByIdFromDb, findProjectByJiraProjectKeyFromDb, + findProjectByLinearTeamIdFromDb, findProjectByRepoFromDb, findProjectWithConfigByBoardId, findProjectWithConfigById, findProjectWithConfigByJiraProjectKey, + findProjectWithConfigByLinearTeamId, findProjectWithConfigByRepo, loadConfigFromDb, } from '../db/repositories/configRepository.js'; @@ -55,6 +57,17 @@ export async function findProjectByJiraProjectKey( return project; } +export async function findProjectByLinearTeamId( + teamId: string, +): Promise { + const cached = configCache.getProjectByLinearTeamId(teamId); + if (cached !== null) return cached; + + const project = await findProjectByLinearTeamIdFromDb(teamId); + configCache.setProjectByLinearTeamId(teamId, project); + return project; +} + export async function findProjectById(id: string): Promise { // No cache for by-id lookups (less frequent, PK is fast) return findProjectByIdFromDb(id); @@ -82,6 +95,12 @@ export async function loadProjectConfigByJiraProjectKey( return findProjectWithConfigByJiraProjectKey(projectKey); } +export async function loadProjectConfigByLinearTeamId( + teamId: string, +): Promise { + return findProjectWithConfigByLinearTeamId(teamId); +} + export async function loadProjectConfigById(id: string): Promise { return findProjectWithConfigById(id); } diff --git a/src/db/repositories/configMapper.ts b/src/db/repositories/configMapper.ts index 64889b01..ad8a2aa3 100644 --- a/src/db/repositories/configMapper.ts +++ b/src/db/repositories/configMapper.ts @@ -28,6 +28,19 @@ export interface JiraIntegrationConfig { 
labels?: Record; } +export interface LinearIntegrationConfig { + teamId: string; + statuses: Record; + labels?: { + processing?: string; + processed?: string; + error?: string; + readyToProcess?: string; + auto?: string; + }; + customFields?: { cost?: string }; +} + // biome-ignore lint/complexity/noBannedTypes: GitHub config has no fields (credentials are in integration_credentials) export type GitHubIntegrationConfig = {}; @@ -60,6 +73,7 @@ export interface MapProjectInput { projectAgentConfigs: AgentConfigRow[]; trelloConfig?: TrelloIntegrationConfig; jiraConfig?: JiraIntegrationConfig; + linearConfig?: LinearIntegrationConfig; githubConfig?: GitHubIntegrationConfig; } @@ -103,6 +117,18 @@ export interface ProjectConfigRaw { customFields?: { cost?: string }; labels?: Record; }; + linear?: { + teamId: string; + statuses: Record; + labels?: { + processing?: string; + processed?: string; + error?: string; + readyToProcess?: string; + auto?: string; + }; + customFields?: { cost?: string }; + }; agentEngine?: { default?: string; overrides: Record; @@ -181,6 +207,15 @@ function buildJiraConfig(config: JiraIntegrationConfig): ProjectConfigRaw['jira' }; } +function buildLinearConfig(config: LinearIntegrationConfig): ProjectConfigRaw['linear'] { + return { + teamId: config.teamId, + statuses: config.statuses, + labels: config.labels, + customFields: config.customFields, + }; +} + function buildAgentEngineConfig( row: ProjectRow, engines: Record, @@ -192,7 +227,10 @@ function buildAgentEngineConfig( }; } -function buildBaseProjectFields(row: ProjectRow, pmType: 'trello' | 'jira'): ProjectConfigRaw { +function buildBaseProjectFields( + row: ProjectRow, + pmType: 'trello' | 'jira' | 'linear', +): ProjectConfigRaw { return { id: row.id, orgId: row.orgId, @@ -222,15 +260,18 @@ function buildBaseProjectFields(row: ProjectRow, pmType: 'trello' | 'jira'): Pro export function extractIntegrationConfigs(integrations: IntegrationRow[]): { trelloConfig?: TrelloIntegrationConfig; 
jiraConfig?: JiraIntegrationConfig; + linearConfig?: LinearIntegrationConfig; githubConfig?: GitHubIntegrationConfig; } { const trelloRow = integrations.find((i) => i.provider === 'trello'); const jiraRow = integrations.find((i) => i.provider === 'jira'); + const linearRow = integrations.find((i) => i.provider === 'linear'); const githubRow = integrations.find((i) => i.provider === 'github'); return { trelloConfig: trelloRow?.config as TrelloIntegrationConfig | undefined, jiraConfig: jiraRow?.config as JiraIntegrationConfig | undefined, + linearConfig: linearRow?.config as LinearIntegrationConfig | undefined, githubConfig: githubRow?.config as GitHubIntegrationConfig | undefined, }; } @@ -240,6 +281,7 @@ export function mapProjectRow({ projectAgentConfigs, trelloConfig, jiraConfig, + linearConfig, }: MapProjectInput): ProjectConfigRaw { const { models, @@ -248,7 +290,7 @@ export function mapProjectRow({ } = buildAgentMaps(projectAgentConfigs); // Derive PM type from integration config - const pmType = jiraConfig ? 'jira' : 'trello'; + const pmType = jiraConfig ? 'jira' : linearConfig ? 'linear' : 'trello'; const project: ProjectConfigRaw = { ...buildBaseProjectFields(row, pmType), @@ -266,6 +308,10 @@ export function mapProjectRow({ project.jira = buildJiraConfig(jiraConfig); } + if (linearConfig) { + project.linear = buildLinearConfig(linearConfig); + } + const agentEngine = buildAgentEngineConfig(row, engines); if (agentEngine) { project.agentEngine = agentEngine; diff --git a/src/db/repositories/configRepository.ts b/src/db/repositories/configRepository.ts index 558a6a04..9eb4bc6c 100644 --- a/src/db/repositories/configRepository.ts +++ b/src/db/repositories/configRepository.ts @@ -38,12 +38,14 @@ function buildRawConfig({ return { projects: projectRows.map((row) => { const integrations = integrationsByProject.get(row.id) ?? 
[]; - const { trelloConfig, jiraConfig, githubConfig } = extractIntegrationConfigs(integrations); + const { trelloConfig, jiraConfig, linearConfig, githubConfig } = + extractIntegrationConfigs(integrations); return mapProjectRow({ row, projectAgentConfigs: projectAgentConfigsMap.get(row.id) ?? [], trelloConfig, jiraConfig, + linearConfig, githubConfig, }); }), @@ -126,6 +128,13 @@ const jiraProjectKeyWhereClause = (projectKey: string) => AND ${projectIntegrations.config}->>'projectKey' = ${projectKey} )`; +const linearTeamIdWhereClause = (teamId: string) => + sql`${projects.id} IN ( + SELECT ${projectIntegrations.projectId} FROM ${projectIntegrations} + WHERE ${projectIntegrations.provider} = 'linear' + AND ${projectIntegrations.config}->>'teamId' = ${teamId} + )`; + export function findProjectByBoardIdFromDb(boardId: string): Promise { return findProjectFromDb(boardIdWhereClause(boardId)); } @@ -144,6 +153,12 @@ export function findProjectByJiraProjectKeyFromDb( return findProjectFromDb(jiraProjectKeyWhereClause(projectKey)); } +export function findProjectByLinearTeamIdFromDb( + teamId: string, +): Promise { + return findProjectFromDb(linearTeamIdWhereClause(teamId)); +} + // WithConfig variants — return both the project and its org-scoped CascadeConfig export function findProjectWithConfigByBoardId( @@ -165,3 +180,9 @@ export function findProjectWithConfigByJiraProjectKey( ): Promise { return findProjectConfigFromDb(jiraProjectKeyWhereClause(projectKey)); } + +export function findProjectWithConfigByLinearTeamId( + teamId: string, +): Promise { + return findProjectConfigFromDb(linearTeamIdWhereClause(teamId)); +} diff --git a/tests/unit/config/provider.test.ts b/tests/unit/config/provider.test.ts index 35e0a815..49505718 100644 --- a/tests/unit/config/provider.test.ts +++ b/tests/unit/config/provider.test.ts @@ -6,7 +6,9 @@ vi.mock('../../../src/db/repositories/configRepository.js', () => ({ findProjectByBoardIdFromDb: vi.fn(), findProjectByRepoFromDb: vi.fn(), 
findProjectByJiraProjectKeyFromDb: vi.fn(), + findProjectByLinearTeamIdFromDb: vi.fn(), findProjectByIdFromDb: vi.fn(), + findProjectWithConfigByLinearTeamId: vi.fn(), })); vi.mock('../../../src/db/repositories/credentialsRepository.js', () => ({ @@ -25,6 +27,8 @@ vi.mock('../../../src/config/configCache.js', () => ({ setProjectByRepo: vi.fn(), getProjectByJiraKey: vi.fn(), setProjectByJiraKey: vi.fn(), + getProjectByLinearTeamId: vi.fn(), + setProjectByLinearTeamId: vi.fn(), getOrgIdForProject: vi.fn(), setOrgIdForProject: vi.fn(), invalidate: vi.fn(), @@ -38,6 +42,7 @@ import { findProjectByBoardId, findProjectById, findProjectByJiraProjectKey, + findProjectByLinearTeamId, findProjectByRepo, getAllProjectCredentials, getIntegrationCredential, @@ -51,6 +56,7 @@ import { findProjectByBoardIdFromDb, findProjectByIdFromDb, findProjectByJiraProjectKeyFromDb, + findProjectByLinearTeamIdFromDb, findProjectByRepoFromDb, loadConfigFromDb, } from '../../../src/db/repositories/configRepository.js'; @@ -253,6 +259,41 @@ describe('config/provider', () => { }); }); + describe('findProjectByLinearTeamId', () => { + it('returns cached project when available', async () => { + vi.mocked(configCache.getProjectByLinearTeamId).mockReturnValue(mockProject); + + const result = await findProjectByLinearTeamId('team-abc123'); + + expect(result).toBe(mockProject); + expect(findProjectByLinearTeamIdFromDb).not.toHaveBeenCalled(); + }); + + it('loads project from DB when not cached', async () => { + vi.mocked(configCache.getProjectByLinearTeamId).mockReturnValue(null); + vi.mocked(findProjectByLinearTeamIdFromDb).mockResolvedValue(mockProject); + + const result = await findProjectByLinearTeamId('team-abc123'); + + expect(result).toBe(mockProject); + expect(findProjectByLinearTeamIdFromDb).toHaveBeenCalledWith('team-abc123'); + expect(configCache.setProjectByLinearTeamId).toHaveBeenCalledWith('team-abc123', mockProject); + }); + + it('caches undefined when project not found', async () => { + 
vi.mocked(configCache.getProjectByLinearTeamId).mockReturnValue(null); + vi.mocked(findProjectByLinearTeamIdFromDb).mockResolvedValue(undefined); + + const result = await findProjectByLinearTeamId('nonexistent-team'); + + expect(result).toBeUndefined(); + expect(configCache.setProjectByLinearTeamId).toHaveBeenCalledWith( + 'nonexistent-team', + undefined, + ); + }); + }); + describe('findProjectById', () => { it('does not use cache for by-id lookups', async () => { vi.mocked(findProjectByIdFromDb).mockResolvedValue(mockProject); diff --git a/tests/unit/db/repositories/configMapper.test.ts b/tests/unit/db/repositories/configMapper.test.ts index 5decb6fa..f97ff7d4 100644 --- a/tests/unit/db/repositories/configMapper.test.ts +++ b/tests/unit/db/repositories/configMapper.test.ts @@ -68,6 +68,19 @@ const githubIntegrationRow: IntegrationRow = { config: {}, }; +const linearConfig = { + teamId: 'team-abc123', + statuses: { todo: 'Todo', inProgress: 'In Progress', done: 'Done' }, + labels: { processing: 'label-processing', readyToProcess: 'label-ready' }, +}; + +const linearIntegrationRow: IntegrationRow = { + projectId: 'proj1', + category: 'pm', + provider: 'linear', + config: linearConfig, +}; + // --------------------------------------------------------------------------- // orUndefined // --------------------------------------------------------------------------- @@ -212,10 +225,18 @@ describe('extractIntegrationConfigs', () => { expect(result.githubConfig).toEqual({}); }); + it('extracts linear config from integration rows', () => { + const result = extractIntegrationConfigs([linearIntegrationRow]); + expect(result.linearConfig).toEqual(linearConfig); + expect(result.trelloConfig).toBeUndefined(); + expect(result.jiraConfig).toBeUndefined(); + }); + it('handles empty integration list', () => { const result = extractIntegrationConfigs([]); expect(result.trelloConfig).toBeUndefined(); expect(result.jiraConfig).toBeUndefined(); + 
expect(result.linearConfig).toBeUndefined(); expect(result.githubConfig).toBeUndefined(); }); @@ -225,6 +246,7 @@ describe('extractIntegrationConfigs', () => { expect(result.trelloConfig).toEqual(trelloConfig); expect(result.githubConfig).toEqual({}); expect(result.jiraConfig).toBeUndefined(); + expect(result.linearConfig).toBeUndefined(); }); }); @@ -272,6 +294,11 @@ describe('mapProjectRow', () => { expect(result.pm.type).toBe('jira'); }); + it('sets pm.type to linear when linearConfig is provided', () => { + const result = mapProjectRow(makeInput({ trelloConfig: undefined, linearConfig })); + expect(result.pm.type).toBe('linear'); + }); + it('builds trello config with boardId, lists, labels', () => { const result = mapProjectRow(makeInput()); expect(result.trello?.boardId).toBe('board123'); @@ -286,6 +313,25 @@ describe('mapProjectRow', () => { expect(result.jira?.statuses).toEqual({ splitting: 'Briefing', todo: 'To Do' }); }); + it('builds linear config with teamId, statuses, and labels', () => { + const result = mapProjectRow(makeInput({ trelloConfig: undefined, linearConfig })); + expect(result.linear?.teamId).toBe('team-abc123'); + expect(result.linear?.statuses).toEqual({ + todo: 'Todo', + inProgress: 'In Progress', + done: 'Done', + }); + expect(result.linear?.labels).toEqual({ + processing: 'label-processing', + readyToProcess: 'label-ready', + }); + }); + + it('does not include linear field when linearConfig is not provided', () => { + const result = mapProjectRow(makeInput()); + expect(result.linear).toBeUndefined(); + }); + it('omits agentEngine when neither row.agentEngine nor agent overrides are set', () => { const result = mapProjectRow(makeInput()); expect(result.agentEngine).toBeUndefined(); diff --git a/tests/unit/db/repositories/configRepository.test.ts b/tests/unit/db/repositories/configRepository.test.ts index 58481daa..0914a0e5 100644 --- a/tests/unit/db/repositories/configRepository.test.ts +++ 
b/tests/unit/db/repositories/configRepository.test.ts @@ -6,6 +6,7 @@ vi.mock('../../../../src/db/client.js', () => mockDbClientModule); import { findProjectByBoardIdFromDb, findProjectByIdFromDb, + findProjectByLinearTeamIdFromDb, findProjectByRepoFromDb, loadConfigFromDb, } from '../../../../src/db/repositories/configRepository.js'; @@ -68,6 +69,21 @@ const jiraIntegration = { updatedAt: new Date(), }; +const linearIntegration = { + id: 4, + projectId: 'proj1', + category: 'pm' as const, + provider: 'linear' as const, + config: { + teamId: 'team-abc123', + statuses: { todo: 'Todo', inProgress: 'In Progress' }, + labels: { processing: 'cascade-processing', readyToProcess: 'cascade-ready' }, + }, + triggers: {}, + createdAt: new Date(), + updatedAt: new Date(), +}; + const projectAgentConfig = { id: 2, projectId: 'proj1', @@ -507,4 +523,48 @@ describe('configRepository', () => { expect(proj.snapshotTtlMs).toBeUndefined(); }); }); + + describe('Linear integration', () => { + it('loads config with Linear integration from project_integrations', async () => { + const mockDb = createSequentialMockDb([[projectRow], [], [linearIntegration]]); + mockGetDb.mockReturnValue(mockDb as never); + + const config = await loadConfigFromDb(); + + expect(config.projects).toHaveLength(1); + const proj = config.projects[0]; + expect(proj.pm?.type).toBe('linear'); + expect(proj.linear?.teamId).toBe('team-abc123'); + expect(proj.linear?.statuses).toEqual({ todo: 'Todo', inProgress: 'In Progress' }); + expect(proj.linear?.labels?.processing).toBe('cascade-processing'); + expect(proj.linear?.labels?.readyToProcess).toBe('cascade-ready'); + }); + }); + + describe('findProjectByLinearTeamIdFromDb', () => { + it('returns project found via integrations teamId subquery', async () => { + const mockDb = createSequentialMockDb([ + [projectRow], // subquery finds project + [], + [linearIntegration], + ]); + mockGetDb.mockReturnValue(mockDb as never); + + const result = await 
findProjectByLinearTeamIdFromDb('team-abc123'); + + expect(result).toBeDefined(); + expect(result?.id).toBe('proj1'); + expect(result?.linear?.teamId).toBe('team-abc123'); + expect(result?.pm?.type).toBe('linear'); + }); + + it('returns undefined when no project has matching Linear team ID', async () => { + const mockDb = createSequentialMockDb([[]]); + mockGetDb.mockReturnValue(mockDb as never); + + const result = await findProjectByLinearTeamIdFromDb('nonexistent-team'); + + expect(result).toBeUndefined(); + }); + }); }); From 8a4009b28469d807413bff3f3de273bf22854b6d Mon Sep 17 00:00:00 2001 From: Zbigniew Sobiecki Date: Tue, 14 Apr 2026 21:55:51 +0200 Subject: [PATCH 28/52] chore(deps): bump vite to 6.4.2 and picomatch to 4.0.4 in /web (#1098) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Security patches for two advisories blocked on Dependabot: - vite 6.4.1 → 6.4.2: path traversal in optimize deps sourcemap handler, server.fs check for env transport (vitejs/vite#22161, #22159) - picomatch 4.0.3 → 4.0.4: CVE-2026-33671, CVE-2026-33672 Replaces #1088 and #1058, which were stuck on CI because Dependabot's lockfile regeneration produced a divergent lockfile vs. dev (dropped @trpc/server and react-is resolved entries, added platform-specific tailwindcss-oxide-wasm32-wasi nested entries). Rather than iterate on @dependabot recreate, bundled both bumps into a single manual PR with a lockfile regenerated from dev's current state. 
Co-authored-by: Claude Opus 4.6 (1M context) --- web/package-lock.json | 14 +++++++------- web/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/web/package-lock.json b/web/package-lock.json index 268b547a..711660c0 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -35,7 +35,7 @@ "@vitejs/plugin-react": "^4.5.2", "tailwindcss": "^4.1.7", "typescript": "^5.7.2", - "vite": "^6.3.5" + "vite": "^6.4.2" } }, "node_modules/@babel/code-frame": { @@ -5153,9 +5153,9 @@ "license": "ISC" }, "node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { @@ -6023,9 +6023,9 @@ } }, "node_modules/vite": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", - "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", + "version": "6.4.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.2.tgz", + "integrity": "sha512-2N/55r4JDJ4gdrCvGgINMy+HH3iRpNIz8K6SFwVsA+JbQScLiC+clmAxBgwiSPgcG9U15QmvqCGWzMbqda5zGQ==", "dev": true, "license": "MIT", "dependencies": { diff --git a/web/package.json b/web/package.json index bf9f0bc4..55bcd949 100644 --- a/web/package.json +++ b/web/package.json @@ -37,6 +37,6 @@ "@vitejs/plugin-react": "^4.5.2", "tailwindcss": "^4.1.7", "typescript": "^5.7.2", - "vite": "^6.3.5" + "vite": "^6.4.2" } } From 04ab3c38a7987d4852a70899a64629a3b11aaf2b Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 22:12:44 +0200 Subject: [PATCH 29/52] feat(linear): add Linear webhook ingestion in router 
(#1099) * feat(linear): add Linear webhook ingestion in router * fix(linear): implement isSelfAuthored to prevent infinite comment loop Add resolveLinearBotUserId() using the Linear GraphQL viewer query, mirroring the JIRA bot identity pattern. isSelfAuthored() now compares the comment payload's userId against the bot's user ID so bot-posted ack comments are filtered before trigger dispatch, preventing an infinite create/Comment webhook loop when triggers match comment creation events. Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- src/router/ackMessageGenerator.ts | 26 ++ src/router/acknowledgments.ts | 26 ++ src/router/adapters/linear.ts | 214 +++++++++ src/router/bot-identity-resolvers.ts | 42 +- src/router/index.ts | 27 ++ src/router/platformClients/credentials.ts | 22 +- src/router/platformClients/index.ts | 2 + src/router/platformClients/linear.ts | 136 ++++++ src/router/queue.ts | 14 +- src/router/webhookVerification.ts | 36 +- src/webhook/signatureVerification.ts | 21 + src/webhook/webhookHandlers.ts | 1 + src/webhook/webhookParsers.ts | 24 + tests/unit/router/adapters/linear.test.ts | 489 ++++++++++++++++++++ tests/unit/router/webhook-signature.test.ts | 125 +++++ 15 files changed, 1201 insertions(+), 4 deletions(-) create mode 100644 src/router/adapters/linear.ts create mode 100644 src/router/platformClients/linear.ts create mode 100644 tests/unit/router/adapters/linear.test.ts diff --git a/src/router/ackMessageGenerator.ts b/src/router/ackMessageGenerator.ts index 0f757d26..46aff6fd 100644 --- a/src/router/ackMessageGenerator.ts +++ b/src/router/ackMessageGenerator.ts @@ -160,6 +160,32 @@ export function extractJiraContext(payload: unknown): string { return truncate(parts.join('\n')); } +/** + * Extract context from a Linear webhook payload. + * Pulls issue title and optional comment body. 
+ */ +export function extractLinearContext(payload: unknown): string { + if (!payload || typeof payload !== 'object') return ''; + + const p = payload as Record; + const parts: string[] = []; + + const data = p.data as Record | undefined; + if (!data) return ''; + + // Issue title (present for Issue and Comment events) + if (data.title) { + parts.push(`Issue: ${data.title as string}`); + } + + // Comment body (present for Comment events) + if (data.body) { + parts.push(`Comment: ${data.body as string}`); + } + + return truncate(parts.join('\n')); +} + // --------------------------------------------------------------------------- // Core generator // --------------------------------------------------------------------------- diff --git a/src/router/acknowledgments.ts b/src/router/acknowledgments.ts index 0585a8b9..42a81df3 100644 --- a/src/router/acknowledgments.ts +++ b/src/router/acknowledgments.ts @@ -13,6 +13,7 @@ import { GitHubPlatformClient, JiraPlatformClient, + LinearPlatformClient, TrelloPlatformClient, } from './platformClients/index.js'; @@ -90,6 +91,29 @@ export async function deleteJiraAck( await client.deleteComment(issueKey, commentId); } +// --------------------------------------------------------------------------- +// Linear — delegates to LinearPlatformClient +// --------------------------------------------------------------------------- + +export async function postLinearAck( + projectId: string, + issueId: string, + message: string, +): Promise { + const client = new LinearPlatformClient(projectId); + const result = await client.postComment(issueId, message); + return typeof result === 'string' ? 
result : null; +} + +export async function deleteLinearAck( + projectId: string, + issueId: string, + commentId: string, +): Promise { + const client = new LinearPlatformClient(projectId); + await client.deleteComment(issueId, commentId); +} + // --------------------------------------------------------------------------- // Bot identity resolution — re-exported from bot-identity-resolvers.ts // for backward compatibility with pm/ integrations and router/trello.ts. @@ -97,8 +121,10 @@ export async function deleteJiraAck( export { _resetJiraBotCache, + _resetLinearBotCache, _resetTrelloBotCache, resolveJiraBotAccountId, + resolveLinearBotUserId, resolveTrelloBotMemberId, } from './bot-identity-resolvers.js'; diff --git a/src/router/adapters/linear.ts b/src/router/adapters/linear.ts new file mode 100644 index 00000000..2f4cdd08 --- /dev/null +++ b/src/router/adapters/linear.ts @@ -0,0 +1,214 @@ +/** + * LinearRouterAdapter — platform-specific logic for the router-side + * Linear webhook processing pipeline. + * + * Follows the same pattern as JiraRouterAdapter and SentryRouterAdapter, + * implementing RouterPlatformAdapter so it can be driven by the generic + * processRouterWebhook() function. 
+ */ + +import { withLinearCredentials } from '../../linear/client.js'; +import type { LinearWebhookPayload } from '../../linear/types.js'; +import type { TriggerRegistry } from '../../triggers/registry.js'; +import type { TriggerContext, TriggerResult } from '../../types/index.js'; +import { logger } from '../../utils/logging.js'; +import { buildWorkItemRunsLink, getDashboardUrl } from '../../utils/runLink.js'; +import { extractLinearContext, generateAckMessage } from '../ackMessageGenerator.js'; +import { postLinearAck, resolveLinearBotUserId } from '../acknowledgments.js'; +import { loadProjectConfig, type RouterProjectConfig } from '../config.js'; +import type { AckResult, ParsedWebhookEvent, RouterPlatformAdapter } from '../platform-adapter.js'; +import { resolveLinearCredentials } from '../platformClients/index.js'; +import type { CascadeJob, LinearJob } from '../queue.js'; + +// ============================================================================ +// Processable event combinations (action/type) +// ============================================================================ + +const PROCESSABLE_TYPES = ['Issue', 'Comment', 'IssueLabel'] as const; + +type ProcessableType = (typeof PROCESSABLE_TYPES)[number]; + +// ============================================================================ +// Extended parsed event for Linear +// ============================================================================ + +interface LinearParsedEvent extends ParsedWebhookEvent { + projectId: string; + action: string; + resourceType: string; +} + +// ============================================================================ +// Adapter +// ============================================================================ + +export class LinearRouterAdapter implements RouterPlatformAdapter { + readonly type = 'linear' as const; + + async parseWebhook(payload: unknown): Promise { + const p = payload as LinearWebhookPayload; + + if (!p.action || !p.type || !p.data) { + 
logger.warn('LinearRouterAdapter: missing required fields', { payload }); + return null; + } + + if (!PROCESSABLE_TYPES.includes(p.type as ProcessableType)) { + logger.debug('LinearRouterAdapter: ignoring non-processable type', { type: p.type }); + return null; + } + + // Extract teamId from payload data for project lookup + const data = p.data as Record; + const teamId = data.teamId as string | undefined; + + if (!teamId) { + logger.debug('LinearRouterAdapter: no teamId in payload data, skipping'); + return null; + } + + const config = await loadProjectConfig(); + const project = config.projects.find((proj) => proj.linear?.teamId === teamId); + if (!project) { + logger.debug('LinearRouterAdapter: no project found for teamId', { teamId }); + return null; + } + + const isCommentEvent = p.type === 'Comment'; + const workItemId = isCommentEvent + ? (data.issueId as string | undefined) + : (data.id as string | undefined); + + return { + projectIdentifier: teamId, + eventType: `${p.action}/${p.type}`, + workItemId, + isCommentEvent, + projectId: project.id, + action: p.action, + resourceType: p.type, + }; + } + + isProcessableEvent(event: ParsedWebhookEvent): boolean { + // All parsed events are processable (we filter in parseWebhook) + return PROCESSABLE_TYPES.some((t) => event.eventType.endsWith(`/${t}`)); + } + + async isSelfAuthored(event: ParsedWebhookEvent, payload: unknown): Promise { + if (!event.isCommentEvent) return false; + const data = (payload as Record)?.data as Record | undefined; + const commentAuthorId = data?.userId as string | undefined; + if (!commentAuthorId) return false; + try { + const projectId = (event as LinearParsedEvent).projectId; + const botId = await resolveLinearBotUserId(projectId); + return !!botId && commentAuthorId === botId; + } catch { + return false; + } + } + + sendReaction(_event: ParsedWebhookEvent, _payload: unknown): void { + // Linear does not support emoji reactions on comments via the same API pattern. + // No-op for now. 
+ } + + async resolveProject(event: ParsedWebhookEvent): Promise { + const config = await loadProjectConfig(); + return config.projects.find((p) => p.linear?.teamId === event.projectIdentifier) ?? null; + } + + async dispatchWithCredentials( + _event: ParsedWebhookEvent, + payload: unknown, + project: RouterProjectConfig, + triggerRegistry: TriggerRegistry, + ): Promise { + const config = await loadProjectConfig(); + const fullProject = config.fullProjects.find((fp) => fp.id === project.id); + if (!fullProject) { + logger.info('LinearRouterAdapter: no full project config found', { + projectId: project.id, + }); + return null; + } + + const linearCreds = await resolveLinearCredentials(project.id); + if (!linearCreds) { + logger.warn('LinearRouterAdapter: missing Linear credentials, cannot dispatch triggers', { + projectId: project.id, + }); + return null; + } + + const ctx: TriggerContext = { project: fullProject, source: 'linear', payload }; + return withLinearCredentials({ apiKey: linearCreds.apiKey }, () => + triggerRegistry.dispatch(ctx), + ); + } + + async postAck( + event: ParsedWebhookEvent, + payload: unknown, + project: RouterProjectConfig, + agentType: string, + _triggerResult?: TriggerResult, + ): Promise { + const linearEvent = event as LinearParsedEvent; + const issueId = linearEvent.workItemId; + if (!issueId) return undefined; + + try { + const context = extractLinearContext(payload); + let message = await generateAckMessage(agentType, context, project.id); + + // Append run link footer when enabled for this project + const config = await loadProjectConfig(); + const fullProject = config.fullProjects.find((fp) => fp.id === project.id); + if (fullProject?.runLinksEnabled && event.workItemId) { + const dashboardUrl = getDashboardUrl(); + if (dashboardUrl) { + const link = buildWorkItemRunsLink({ + dashboardUrl, + projectId: project.id, + workItemId: event.workItemId, + }); + if (link) message += link; + } + } + + const commentId = await 
postLinearAck(project.id, issueId, message); + if (commentId) return { commentId, message }; + return undefined; + } catch (err) { + logger.warn('LinearRouterAdapter: ack comment failed (non-fatal)', { + error: String(err), + issueId, + }); + return undefined; + } + } + + buildJob( + event: ParsedWebhookEvent, + payload: unknown, + project: RouterProjectConfig, + result: TriggerResult, + ackResult?: AckResult, + ): CascadeJob { + const linearEvent = event as LinearParsedEvent; + const job: LinearJob = { + type: 'linear', + source: 'linear', + payload, + projectId: project.id, + workItemId: linearEvent.workItemId, + eventType: linearEvent.eventType, + receivedAt: new Date().toISOString(), + triggerResult: result, + ackCommentId: ackResult?.commentId as string | undefined, + }; + return job; + } +} diff --git a/src/router/bot-identity-resolvers.ts b/src/router/bot-identity-resolvers.ts index 4c72eb88..68f04cd2 100644 --- a/src/router/bot-identity-resolvers.ts +++ b/src/router/bot-identity-resolvers.ts @@ -8,7 +8,11 @@ */ import { BotIdentityCache } from './bot-identity.js'; -import { resolveJiraCredentials, resolveTrelloCredentials } from './platformClients/index.js'; +import { + resolveJiraCredentials, + resolveLinearCredentials, + resolveTrelloCredentials, +} from './platformClients/index.js'; // --------------------------------------------------------------------------- // JIRA bot identity @@ -70,3 +74,39 @@ export async function resolveTrelloBotMemberId(projectId: string): Promise('userId'); + +/** + * Resolve the Linear user ID for the bot credentials linked to a project. + * Uses the `viewer` query to fetch the authenticated user's ID. + * Cached per-project with 60s TTL. Returns null on any failure. 
+ */ +export async function resolveLinearBotUserId(projectId: string): Promise { + return linearBotIdentityCache.resolve(projectId, async () => { + const creds = await resolveLinearCredentials(projectId); + if (!creds) return null; + + const response = await fetch('https://api.linear.app/graphql', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${creds.apiKey}`, + }, + body: JSON.stringify({ query: '{ viewer { id } }' }), + }); + if (!response.ok) return null; + + const data = (await response.json()) as { data?: { viewer?: { id?: string } } }; + return data.data?.viewer?.id ?? null; + }); +} + +/** @internal Visible for testing only */ +export function _resetLinearBotCache(): void { + linearBotIdentityCache._reset(); +} diff --git a/src/router/index.ts b/src/router/index.ts index 7ad590d3..2ab721a1 100644 --- a/src/router/index.ts +++ b/src/router/index.ts @@ -15,11 +15,13 @@ import { createWebhookHandler, parseGitHubPayload, parseJiraPayload, + parseLinearPayload, parseSentryPayload, parseTrelloPayload, } from '../webhook/webhookHandlers.js'; import { GitHubRouterAdapter, injectEventType } from './adapters/github.js'; import { JiraRouterAdapter } from './adapters/jira.js'; +import { LinearRouterAdapter } from './adapters/linear.js'; import { SentryRouterAdapter } from './adapters/sentry.js'; import { TrelloRouterAdapter } from './adapters/trello.js'; import { startCancelListener, stopCancelListener } from './cancel-listener.js'; @@ -28,6 +30,7 @@ import { processRouterWebhook } from './webhook-processor.js'; import { verifyGitHubWebhookSignature, verifyJiraWebhookSignature, + verifyLinearWebhookSignature, verifySentryWebhookSignature, verifyTrelloWebhookSignature, } from './webhookVerification.js'; @@ -167,6 +170,30 @@ app.post( }), ); +// Linear webhook verification +app.get('/linear/webhook', (c) => { + return c.text('OK', 200); +}); + +// Linear webhook handler +app.post( + '/linear/webhook', + 
createWebhookHandler({ + source: 'linear', + parsePayload: parseLinearPayload, + verifySignature: verifyLinearWebhookSignature, + processWebhook: async (payload) => { + const adapter = new LinearRouterAdapter(); + const result = await processRouterWebhook(adapter, payload, triggerRegistry); + return { + processed: result.shouldProcess, + projectId: result.projectId, + decisionReason: result.decisionReason, + }; + }, + }), +); + // Graceful shutdown async function shutdown(signal: string): Promise { logger.info('Received shutdown signal', { signal }); diff --git a/src/router/platformClients/credentials.ts b/src/router/platformClients/credentials.ts index eef48516..7efafcb5 100644 --- a/src/router/platformClients/credentials.ts +++ b/src/router/platformClients/credentials.ts @@ -11,6 +11,7 @@ import { getIntegrationCredential, getIntegrationCredentialOrNull, } from '../../config/provider.js'; +import type { LinearCredentials } from '../../linear/types.js'; import { getJiraConfig } from '../../pm/config.js'; import type { JiraCredentialsWithAuth, TrelloCredentials } from './types.js'; @@ -51,6 +52,21 @@ export async function resolveJiraCredentials( } } +/** + * Resolve Linear credentials for a project. + * Returns `{ apiKey }` or `null` if credentials are missing. + */ +export async function resolveLinearCredentials( + projectId: string, +): Promise { + try { + const apiKey = await getIntegrationCredential(projectId, 'pm', 'api_key'); + return { apiKey }; + } catch { + return null; + } +} + /** * Resolve the webhook secret for a given provider and project. * @@ -59,12 +75,13 @@ export async function resolveJiraCredentials( * Trello computes webhook HMAC signatures using the API Secret (shown below the * API Key at https://trello.com/app-key), not the public API Key. * - `'jira'`: resolves the `webhook_secret` credential from the PM integration. + * - `'linear'`: resolves the `webhook_secret` credential from the PM integration. 
* * Returns `null` if the credential is not configured. */ export async function resolveWebhookSecret( projectId: string, - provider: 'github' | 'trello' | 'jira' | 'sentry', + provider: 'github' | 'trello' | 'jira' | 'sentry' | 'linear', ): Promise { if (provider === 'github') { return getIntegrationCredentialOrNull(projectId, 'scm', 'webhook_secret'); @@ -75,6 +92,9 @@ export async function resolveWebhookSecret( if (provider === 'sentry') { return getIntegrationCredentialOrNull(projectId, 'alerting', 'webhook_secret'); } + if (provider === 'linear') { + return getIntegrationCredentialOrNull(projectId, 'pm', 'webhook_secret'); + } // Trello signs webhook payloads with the API Secret, not the public API Key. return getIntegrationCredentialOrNull(projectId, 'pm', 'api_secret'); } diff --git a/src/router/platformClients/index.ts b/src/router/platformClients/index.ts index e9b4f9b9..78125531 100644 --- a/src/router/platformClients/index.ts +++ b/src/router/platformClients/index.ts @@ -11,9 +11,11 @@ export { resolveGitHubHeaders, resolveJiraCredentials, + resolveLinearCredentials, resolveTrelloCredentials, } from './credentials.js'; export { GitHubPlatformClient } from './github.js'; export { _resetJiraCloudIdCache, JiraPlatformClient } from './jira.js'; +export { LinearPlatformClient } from './linear.js'; export { TrelloPlatformClient } from './trello.js'; export type { JiraCredentialsWithAuth, PlatformCommentClient, TrelloCredentials } from './types.js'; diff --git a/src/router/platformClients/linear.ts b/src/router/platformClients/linear.ts new file mode 100644 index 00000000..3a32e500 --- /dev/null +++ b/src/router/platformClients/linear.ts @@ -0,0 +1,136 @@ +/** + * Linear platform client for posting/deleting comments on Linear issues + * via the Linear GraphQL API. + * + * Comments are posted using the Linear GraphQL API with markdown body text. 
+ */ + +import { logger } from '../../utils/logging.js'; +import { resolveLinearCredentials } from './credentials.js'; +import type { PlatformCommentClient } from './types.js'; + +const LINEAR_API_URL = 'https://api.linear.app/graphql'; + +async function linearGraphQL( + apiKey: string, + query: string, + variables?: Record, +): Promise> { + const response = await fetch(LINEAR_API_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${apiKey}`, + }, + body: JSON.stringify({ query, variables }), + }); + + if (!response.ok) { + throw new Error(`Linear API HTTP error ${response.status}`); + } + + const json = (await response.json()) as { + data?: Record; + errors?: Array<{ message: string }>; + }; + + if (json.errors && json.errors.length > 0) { + const messages = json.errors.map((e) => e.message).join('; '); + throw new Error(`Linear API error: ${messages}`); + } + + return json.data ?? {}; +} + +export class LinearPlatformClient implements PlatformCommentClient { + constructor(private readonly projectId: string) {} + + async postComment(issueId: string, message: string): Promise { + const creds = await resolveLinearCredentials(this.projectId); + if (!creds) { + logger.warn('[PlatformClient] Missing Linear credentials, skipping comment'); + return null; + } + + try { + const mutation = ` + mutation CreateComment($issueId: String!, $body: String!) { + commentCreate(input: { issueId: $issueId, body: $body }) { + success + comment { + id + } + } + } + `; + + const data = await linearGraphQL(creds.apiKey, mutation, { + issueId, + body: message, + }); + + const commentCreate = data.commentCreate as + | { success: boolean; comment?: { id: string } } + | undefined; + + if (!commentCreate?.success) { + logger.warn('[PlatformClient] Linear commentCreate returned success=false'); + return null; + } + + const commentId = commentCreate.comment?.id ?? 
null; + logger.info('[PlatformClient] Linear comment posted for issue:', issueId); + return commentId; + } catch (err) { + logger.warn('[PlatformClient] Failed to post Linear comment:', String(err)); + return null; + } + } + + async deleteComment(_issueId: string, commentId: string | number): Promise { + const creds = await resolveLinearCredentials(this.projectId); + if (!creds) return; + + try { + const mutation = ` + mutation DeleteComment($commentId: String!) { + commentDelete(id: $commentId) { + success + } + } + `; + + await linearGraphQL(creds.apiKey, mutation, { + commentId: String(commentId), + }); + + logger.info('[PlatformClient] Linear comment deleted:', commentId); + } catch (err) { + logger.warn('[PlatformClient] Failed to delete Linear comment:', String(err)); + } + } + + async updateComment(commentId: string, message: string): Promise { + const creds = await resolveLinearCredentials(this.projectId); + if (!creds) return; + + try { + const mutation = ` + mutation UpdateComment($commentId: String!, $body: String!) 
{ + commentUpdate(id: $commentId, input: { body: $body }) { + success + } + } + `; + + await linearGraphQL(creds.apiKey, mutation, { + commentId, + body: message, + }); + + logger.info('[PlatformClient] Linear comment updated:', commentId); + } catch (err) { + logger.warn('[PlatformClient] Failed to update Linear comment:', String(err)); + } + } +} diff --git a/src/router/queue.ts b/src/router/queue.ts index f35035cb..b69067e3 100644 --- a/src/router/queue.ts +++ b/src/router/queue.ts @@ -58,7 +58,19 @@ export interface SentryJob { triggerResult?: TriggerResult; } -export type CascadeJob = TrelloJob | GitHubJob | JiraJob | SentryJob; +export interface LinearJob { + type: 'linear'; + source: 'linear'; + payload: unknown; + projectId: string; + workItemId?: string; + eventType: string; + receivedAt: string; + ackCommentId?: string; + triggerResult?: TriggerResult; +} + +export type CascadeJob = TrelloJob | GitHubJob | JiraJob | SentryJob | LinearJob; // Create the job queue export const jobQueue = new Queue('cascade-jobs', { diff --git a/src/router/webhookVerification.ts b/src/router/webhookVerification.ts index da279c79..5ad268c7 100644 --- a/src/router/webhookVerification.ts +++ b/src/router/webhookVerification.ts @@ -10,6 +10,7 @@ import { logger } from '../utils/logging.js'; import { verifyGitHubSignature, verifyJiraSignature, + verifyLinearSignature, verifySentrySignature, verifyTrelloSignature, } from '../webhook/signatureVerification.js'; @@ -17,7 +18,7 @@ import { loadProjectConfig, routerConfig } from './config.js'; import { resolveWebhookSecret } from './platformClients/credentials.js'; /** The set of platforms that have a webhook secret in {@link resolveWebhookSecret}. 
*/ -type WebhookPlatform = 'github' | 'trello' | 'jira' | 'sentry'; +type WebhookPlatform = 'github' | 'trello' | 'jira' | 'sentry' | 'linear'; // --------------------------------------------------------------------------- // Helpers @@ -268,3 +269,36 @@ export const verifyJiraWebhookSignature = createWebhookVerifier({ ) as { id: string } | undefined, verify: (rawBody, sig, secret) => verifyJiraSignature(rawBody, sig, secret), }); + +/** + * Extract the Linear team ID from a raw webhook payload. + * Linear sends the team ID nested in `data.teamId` for Issue events. + */ +export function extractLinearTeamId(rawBody: string): string | undefined { + try { + const parsed = JSON.parse(rawBody) as Record; + const data = parsed?.data as Record | undefined; + return data?.teamId as string | undefined; + } catch { + return undefined; + } +} + +/** + * verifySignature callback for the Linear webhook handler. + * Returns null to skip verification when no secret is configured (backwards compat). + * + * Linear sends the signature as a raw HMAC-SHA256 hex digest in the + * `Linear-Signature` header (no prefix). + */ +export const verifyLinearWebhookSignature = createWebhookVerifier({ + headerName: 'Linear-Signature', + platform: 'linear', + platformLabel: 'Linear', + extractIdentifier: (_c, rawBody) => extractLinearTeamId(rawBody), + findProject: (teamId, projects) => + projects.find((p) => (p.linear as Record | undefined)?.teamId === teamId) as + | { id: string } + | undefined, + verify: (rawBody, sig, secret) => verifyLinearSignature(rawBody, sig, secret), +}); diff --git a/src/webhook/signatureVerification.ts b/src/webhook/signatureVerification.ts index f20189c9..f3c14963 100644 --- a/src/webhook/signatureVerification.ts +++ b/src/webhook/signatureVerification.ts @@ -165,3 +165,24 @@ export function verifyJiraSignature(rawBody: string, signature: string, secret: prefix: 'sha256=', }); } + +/** + * Verify a Linear webhook signature. 
+ * + * Linear signs payloads with HMAC-SHA256 and sends the result as a raw hex + * digest in the `Linear-Signature` header (no prefix). + * + * @param rawBody - The raw request body string. + * @param signature - The value of the `Linear-Signature` header. + * @param secret - The LINEAR_WEBHOOK_SECRET configured for the webhook. + * @returns `true` if the signature is valid, `false` otherwise. + */ +export function verifyLinearSignature(rawBody: string, signature: string, secret: string): boolean { + return verifyHmac({ + algorithm: 'sha256', + data: rawBody, + secret, + signature, + encoding: 'hex', + }); +} diff --git a/src/webhook/webhookHandlers.ts b/src/webhook/webhookHandlers.ts index b5e9f00a..9a8580d0 100644 --- a/src/webhook/webhookHandlers.ts +++ b/src/webhook/webhookHandlers.ts @@ -23,6 +23,7 @@ import { handleProcessingError, logSuccessfulWebhook } from './webhookLogging.js export { parseGitHubPayload, parseJiraPayload, + parseLinearPayload, parseSentryPayload, parseTrelloPayload, } from './webhookParsers.js'; diff --git a/src/webhook/webhookParsers.ts b/src/webhook/webhookParsers.ts index e4b52a24..a38da013 100644 --- a/src/webhook/webhookParsers.ts +++ b/src/webhook/webhookParsers.ts @@ -112,3 +112,27 @@ export async function parseJiraPayload(c: Context): Promise { return { ok: false, error: String(err) }; } } + +/** + * Parse a Linear webhook request (plain JSON). + * Extracts `{action}/{type}` as the event type (e.g. `create/Issue`, `update/Issue`). + * Linear sends the action in `action` and the resource type in `type`. + */ +export async function parseLinearPayload(c: Context): Promise { + try { + const rawBody = await c.req.text(); + const payload = JSON.parse(rawBody); + const p = payload as Record; + const action = p?.action as string | undefined; + const type = p?.type as string | undefined; + const eventType = action && type ? `${action}/${type}` : (action ?? type ?? 
'unknown'); + logger.info('Received Linear webhook', { + action, + type, + eventType, + }); + return { ok: true, payload, eventType, rawBody }; + } catch (err) { + return { ok: false, error: String(err) }; + } +} diff --git a/tests/unit/router/adapters/linear.test.ts b/tests/unit/router/adapters/linear.test.ts new file mode 100644 index 00000000..e9db5e21 --- /dev/null +++ b/tests/unit/router/adapters/linear.test.ts @@ -0,0 +1,489 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +vi.mock('../../../../src/utils/logging.js', () => ({ + logger: { + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }, +})); + +vi.mock('../../../../src/router/config.js', () => ({ + loadProjectConfig: vi.fn(), +})); +vi.mock('../../../../src/router/queue.js', () => ({ + addJob: vi.fn(), +})); +vi.mock('../../../../src/router/acknowledgments.js', () => ({ + postLinearAck: vi.fn(), + resolveLinearBotUserId: vi.fn().mockResolvedValue(null), +})); +vi.mock('../../../../src/router/ackMessageGenerator.js', () => ({ + extractLinearContext: vi.fn().mockReturnValue('Issue: Fix the bug'), + generateAckMessage: vi.fn().mockResolvedValue('Working on it...'), +})); +vi.mock('../../../../src/router/platformClients/index.js', () => ({ + resolveLinearCredentials: vi.fn().mockResolvedValue({ + apiKey: 'lin_api_test', + }), +})); +vi.mock('../../../../src/utils/runLink.js', () => ({ + buildWorkItemRunsLink: vi.fn().mockReturnValue(null), + getDashboardUrl: vi.fn().mockReturnValue(null), +})); +vi.mock('../../../../src/linear/client.js', () => ({ + withLinearCredentials: vi.fn().mockImplementation((_creds: unknown, fn: () => unknown) => fn()), +})); + +import { postLinearAck, resolveLinearBotUserId } from '../../../../src/router/acknowledgments.js'; +import { LinearRouterAdapter } from '../../../../src/router/adapters/linear.js'; +import type { RouterProjectConfig } from '../../../../src/router/config.js'; +import { loadProjectConfig } from 
'../../../../src/router/config.js'; +import { resolveLinearCredentials } from '../../../../src/router/platformClients/index.js'; +import type { TriggerRegistry } from '../../../../src/triggers/registry.js'; +import { buildWorkItemRunsLink, getDashboardUrl } from '../../../../src/utils/runLink.js'; + +const mockProject: RouterProjectConfig = { + id: 'p1', + repo: 'owner/repo', + pmType: 'linear', + linear: { + teamId: 'team-abc-123', + }, +}; + +const mockTriggerRegistry = { + dispatch: vi.fn().mockResolvedValue(null), +} as unknown as TriggerRegistry; + +beforeEach(() => { + vi.mocked(loadProjectConfig).mockResolvedValue({ + projects: [mockProject], + fullProjects: [{ id: 'p1' } as never], + }); +}); + +const baseLinearPayload = { + action: 'create', + type: 'Issue', + organizationId: 'org-123', + webhookTimestamp: Date.now(), + data: { + id: 'issue-abc', + title: 'Fix the bug', + teamId: 'team-abc-123', + }, + url: 'https://linear.app/team/issue/TEAM-1', +}; + +describe('LinearRouterAdapter', () => { + let adapter: LinearRouterAdapter; + + beforeEach(() => { + adapter = new LinearRouterAdapter(); + }); + + describe('parseWebhook', () => { + it('returns null for empty payload', async () => { + const result = await adapter.parseWebhook({}); + expect(result).toBeNull(); + }); + + it('returns null for unsupported type', async () => { + const result = await adapter.parseWebhook({ + action: 'create', + type: 'CycleIssue', + data: { teamId: 'team-abc-123' }, + }); + expect(result).toBeNull(); + }); + + it('returns null when no project matches teamId', async () => { + vi.mocked(loadProjectConfig).mockResolvedValue({ projects: [], fullProjects: [] }); + const result = await adapter.parseWebhook(baseLinearPayload); + expect(result).toBeNull(); + }); + + it('returns null when no teamId in data', async () => { + const result = await adapter.parseWebhook({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', title: 'Test' }, + }); + expect(result).toBeNull(); + }); 
+ + it('returns parsed event for create/Issue', async () => { + const result = await adapter.parseWebhook(baseLinearPayload); + expect(result).not.toBeNull(); + expect(result?.eventType).toBe('create/Issue'); + expect(result?.workItemId).toBe('issue-abc'); + expect(result?.isCommentEvent).toBe(false); + expect(result?.projectIdentifier).toBe('team-abc-123'); + }); + + it('returns parsed event for Comment (isCommentEvent=true)', async () => { + const commentPayload = { + action: 'create', + type: 'Comment', + organizationId: 'org-123', + webhookTimestamp: Date.now(), + data: { + id: 'comment-xyz', + body: 'Great fix!', + issueId: 'issue-abc', + teamId: 'team-abc-123', + }, + url: 'https://linear.app/issue', + }; + const result = await adapter.parseWebhook(commentPayload); + expect(result).not.toBeNull(); + expect(result?.isCommentEvent).toBe(true); + expect(result?.eventType).toBe('create/Comment'); + // For comments, workItemId is the issueId + expect(result?.workItemId).toBe('issue-abc'); + }); + + it('returns parsed event for update/Issue', async () => { + const result = await adapter.parseWebhook({ ...baseLinearPayload, action: 'update' }); + expect(result?.eventType).toBe('update/Issue'); + }); + }); + + describe('isProcessableEvent', () => { + it('returns true for Issue events', () => { + expect( + adapter.isProcessableEvent({ + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + isCommentEvent: false, + }), + ).toBe(true); + }); + + it('returns true for Comment events', () => { + expect( + adapter.isProcessableEvent({ + projectIdentifier: 'team-abc-123', + eventType: 'create/Comment', + isCommentEvent: true, + }), + ).toBe(true); + }); + + it('returns false for unknown event types', () => { + expect( + adapter.isProcessableEvent({ + projectIdentifier: 'team-abc-123', + eventType: 'create/Cycle', + isCommentEvent: false, + }), + ).toBe(false); + }); + }); + + describe('isSelfAuthored', () => { + it('returns false for non-comment events', async () 
=> { + const result = await adapter.isSelfAuthored( + { projectIdentifier: 'team-abc-123', eventType: 'create/Issue', isCommentEvent: false }, + { data: { id: 'issue-abc', teamId: 'team-abc-123' } }, + ); + expect(result).toBe(false); + }); + + it('returns false when comment has no userId', async () => { + const result = await adapter.isSelfAuthored( + { projectIdentifier: 'team-abc-123', eventType: 'create/Comment', isCommentEvent: true }, + { data: { id: 'comment-xyz', body: 'Hello' } }, + ); + expect(result).toBe(false); + }); + + it('returns false when bot userId cannot be resolved', async () => { + vi.mocked(resolveLinearBotUserId).mockResolvedValueOnce(null); + const result = await adapter.isSelfAuthored( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Comment', + isCommentEvent: true, + // @ts-expect-error extended field + projectId: 'p1', + }, + { data: { id: 'comment-xyz', body: 'Hello', userId: 'user-bot-id' } }, + ); + expect(result).toBe(false); + }); + + it('returns true when comment userId matches bot userId', async () => { + vi.mocked(resolveLinearBotUserId).mockResolvedValueOnce('user-bot-id'); + const result = await adapter.isSelfAuthored( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Comment', + isCommentEvent: true, + // @ts-expect-error extended field + projectId: 'p1', + }, + { data: { id: 'comment-xyz', body: 'Hello', userId: 'user-bot-id' } }, + ); + expect(result).toBe(true); + }); + + it('returns false when comment userId does not match bot userId', async () => { + vi.mocked(resolveLinearBotUserId).mockResolvedValueOnce('user-bot-id'); + const result = await adapter.isSelfAuthored( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Comment', + isCommentEvent: true, + // @ts-expect-error extended field + projectId: 'p1', + }, + { data: { id: 'comment-xyz', body: 'Hello', userId: 'user-other-id' } }, + ); + expect(result).toBe(false); + }); + }); + + describe('sendReaction', () => { + it('does nothing 
(no-op)', () => { + // Should not throw + adapter.sendReaction( + { projectIdentifier: 'team-abc-123', eventType: 'create/Issue', isCommentEvent: false }, + {}, + ); + }); + }); + + describe('resolveProject', () => { + it('returns project matching Linear teamId', async () => { + const project = await adapter.resolveProject({ + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + isCommentEvent: false, + }); + expect(project?.id).toBe('p1'); + }); + + it('returns null for unknown teamId', async () => { + const project = await adapter.resolveProject({ + projectIdentifier: 'unknown-team', + eventType: 'create/Issue', + isCommentEvent: false, + }); + expect(project).toBeNull(); + }); + }); + + describe('dispatchWithCredentials', () => { + it('dispatches with Linear credentials', async () => { + vi.mocked(mockTriggerRegistry.dispatch).mockResolvedValue({ + agentType: 'implementation', + agentInput: {}, + } as never); + + const result = await adapter.dispatchWithCredentials( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + mockTriggerRegistry, + ); + expect(result).not.toBeNull(); + expect(mockTriggerRegistry.dispatch).toHaveBeenCalled(); + }); + + it('returns null when Linear credentials are missing', async () => { + vi.mocked(resolveLinearCredentials).mockResolvedValueOnce(null); + + const result = await adapter.dispatchWithCredentials( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + mockTriggerRegistry, + ); + expect(result).toBeNull(); + }); + + it('returns null when no full project config found', async () => { + vi.mocked(loadProjectConfig).mockResolvedValueOnce({ + projects: [mockProject], + fullProjects: [], + }); + + const result = await 
adapter.dispatchWithCredentials( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + isCommentEvent: false, + }, + baseLinearPayload, + mockProject, + mockTriggerRegistry, + ); + expect(result).toBeNull(); + }); + }); + + describe('postAck', () => { + it('posts ack and returns AckResult', async () => { + vi.mocked(postLinearAck).mockResolvedValue('comment-123'); + + const ackResult = await adapter.postAck( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: 'issue-abc', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + 'implementation', + ); + expect(ackResult?.commentId).toBe('comment-123'); + expect(ackResult?.message).toBe('Working on it...'); + }); + + it('returns undefined when no workItemId', async () => { + const ackResult = await adapter.postAck( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: undefined, + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + 'implementation', + ); + expect(ackResult).toBeUndefined(); + }); + + it('returns undefined when postLinearAck returns null', async () => { + vi.mocked(postLinearAck).mockResolvedValue(null); + + const ackResult = await adapter.postAck( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: 'issue-abc', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + 'implementation', + ); + expect(ackResult).toBeUndefined(); + }); + + it('appends run link footer when runLinksEnabled and dashboardUrl available', async () => { + vi.mocked(loadProjectConfig).mockResolvedValue({ + projects: [mockProject], + fullProjects: [{ id: 'p1', runLinksEnabled: true } as never], + }); + vi.mocked(getDashboardUrl).mockReturnValue('https://dashboard.example.com'); + 
vi.mocked(buildWorkItemRunsLink).mockReturnValue( + '\n[View runs](https://dashboard.example.com/runs)', + ); + vi.mocked(postLinearAck).mockResolvedValue('comment-123'); + + const ackResult = await adapter.postAck( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: 'issue-abc', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + 'implementation', + ); + expect(buildWorkItemRunsLink).toHaveBeenCalled(); + expect(ackResult?.message).toContain('[View runs]'); + }); + + it('handles postLinearAck error gracefully', async () => { + vi.mocked(postLinearAck).mockRejectedValue(new Error('API error')); + + const ackResult = await adapter.postAck( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: 'issue-abc', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + }, + baseLinearPayload, + mockProject, + 'implementation', + ); + expect(ackResult).toBeUndefined(); + }); + }); + + describe('buildJob', () => { + it('builds a linear job with correct fields', () => { + const result = { agentType: 'implementation', agentInput: { issueId: 'issue-abc' } }; + const job = adapter.buildJob( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: 'issue-abc', + isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + action: 'create', + resourceType: 'Issue', + }, + baseLinearPayload, + mockProject, + result as never, + ); + expect(job.type).toBe('linear'); + expect((job as { workItemId?: string }).workItemId).toBe('issue-abc'); + expect((job as { ackCommentId?: string }).ackCommentId).toBeUndefined(); + }); + + it('includes ackCommentId when ackResult is provided', () => { + const result = { agentType: 'implementation', agentInput: {} }; + const job = adapter.buildJob( + { + projectIdentifier: 'team-abc-123', + eventType: 'create/Issue', + workItemId: 'issue-abc', + 
isCommentEvent: false, + // @ts-expect-error extended field + projectId: 'p1', + action: 'create', + resourceType: 'Issue', + }, + baseLinearPayload, + mockProject, + result as never, + { commentId: 'comment-789', message: 'Working...' }, + ); + expect((job as { ackCommentId?: string }).ackCommentId).toBe('comment-789'); + }); + }); +}); diff --git a/tests/unit/router/webhook-signature.test.ts b/tests/unit/router/webhook-signature.test.ts index e66b0852..9fec705e 100644 --- a/tests/unit/router/webhook-signature.test.ts +++ b/tests/unit/router/webhook-signature.test.ts @@ -125,9 +125,11 @@ import { buildTrelloCallbackUrl, createWebhookVerifier, extractJiraProjectKey, + extractLinearTeamId, extractTrelloBoardId, verifyGitHubWebhookSignature, verifyJiraWebhookSignature, + verifyLinearWebhookSignature, verifyTrelloWebhookSignature, } from '../../../src/router/webhookVerification.js'; import { logger } from '../../../src/utils/logging.js'; @@ -180,9 +182,19 @@ const JIRA_PROJECT = { }, }; +const LINEAR_PROJECT = { + id: 'proj-linear', + repo: 'owner/repo', + pmType: 'linear' as const, + linear: { + teamId: 'team-abc-123', + }, +}; + const GITHUB_SECRET = 'my-github-webhook-secret'; const TRELLO_SECRET = 'my-trello-api-secret'; const JIRA_SECRET = 'my-jira-webhook-secret'; +const LINEAR_SECRET = 'my-linear-webhook-secret'; const TRELLO_CALLBACK_URL = 'https://example.com/trello/webhook'; // --------------------------------------------------------------------------- @@ -432,6 +444,119 @@ describe('verifyTrelloWebhookSignature — direct function tests', () => { }); }); +// --------------------------------------------------------------------------- +// Unit tests: extractLinearTeamId +// --------------------------------------------------------------------------- + +describe('extractLinearTeamId', () => { + it('extracts teamId from data.teamId', () => { + const body = JSON.stringify({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', teamId: 'team-abc-123' }, + 
}); + expect(extractLinearTeamId(body)).toBe('team-abc-123'); + }); + + it('returns undefined when data.teamId is missing', () => { + const body = JSON.stringify({ action: 'create', type: 'Issue', data: {} }); + expect(extractLinearTeamId(body)).toBeUndefined(); + }); + + it('returns undefined for invalid JSON', () => { + expect(extractLinearTeamId('not json')).toBeUndefined(); + }); +}); + +// --------------------------------------------------------------------------- +// Unit tests: verifyLinearWebhookSignature (function directly) +// --------------------------------------------------------------------------- + +describe('verifyLinearWebhookSignature — direct function tests', () => { + beforeEach(() => { + vi.mocked(loadProjectConfig).mockResolvedValue({ projects: [LINEAR_PROJECT] }); + vi.mocked(resolveWebhookSecret).mockResolvedValue(LINEAR_SECRET); + }); + + function makeContext(headers: Record = {}) { + return { + req: { + header: (name: string) => headers[name.toLowerCase()] ?? headers[name], + }, + } as unknown as import('hono').Context; + } + + function linearSignature(body: string, secret: string): string { + return createHmac('sha256', secret).update(body, 'utf8').digest('hex'); + } + + it('returns { valid: true } when signature is correct', async () => { + const body = JSON.stringify({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', teamId: 'team-abc-123' }, + }); + const sig = linearSignature(body, LINEAR_SECRET); + const result = await verifyLinearWebhookSignature( + makeContext({ 'Linear-Signature': sig }), + body, + ); + expect(result).toEqual({ valid: true, reason: 'Signature valid' }); + }); + + it('returns { valid: false } when signature is wrong', async () => { + const body = JSON.stringify({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', teamId: 'team-abc-123' }, + }); + const badSig = linearSignature(body, 'wrong-secret'); + const result = await verifyLinearWebhookSignature( + makeContext({ 'Linear-Signature': 
badSig }), + body, + ); + expect(result).toEqual({ valid: false, reason: 'Linear signature mismatch' }); + }); + + it('returns { valid: false, reason: "Missing signature header" } when header absent but secret configured', async () => { + const body = JSON.stringify({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', teamId: 'team-abc-123' }, + }); + const result = await verifyLinearWebhookSignature(makeContext({}), body); + expect(result).toEqual({ valid: false, reason: 'Missing signature header' }); + }); + + it('returns null (skip) when no secret configured', async () => { + vi.mocked(resolveWebhookSecret).mockResolvedValue(null); + const body = JSON.stringify({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', teamId: 'team-abc-123' }, + }); + const result = await verifyLinearWebhookSignature(makeContext({}), body); + expect(result).toBeNull(); + }); + + it('returns null (skip) when project not found for teamId', async () => { + vi.mocked(loadProjectConfig).mockResolvedValue({ projects: [] }); + const body = JSON.stringify({ + action: 'create', + type: 'Issue', + data: { id: 'issue-abc', teamId: 'unknown-team' }, + }); + const result = await verifyLinearWebhookSignature(makeContext({}), body); + expect(result).toBeNull(); + }); + + it('returns null (skip) when teamId is missing from payload', async () => { + const body = JSON.stringify({ action: 'create', type: 'Issue', data: {} }); + const result = await verifyLinearWebhookSignature(makeContext({}), body); + expect(result).toBeNull(); + }); +}); + // --------------------------------------------------------------------------- // Integration tests: end-to-end via Hono app (mirrors src/router/index.ts wiring) // --------------------------------------------------------------------------- From 8d20981e55b87271242b567341cd9321dc9fa8a2 Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 22:40:49 +0200 Subject: [PATCH 30/52] feat(linear): add Linear worker dispatch and trigger 
handlers (#1100) * feat(linear): add Linear worker dispatch and trigger handlers * fix(linear): address review feedback on duplication and dead code - Extract STATUS_TO_AGENT to src/triggers/shared/status-to-agent.ts and re-export from jira/types.ts and linear/types.ts to eliminate duplication - Replace inline Linear viewer query in comment-mention.ts with the existing resolveLinearBotUserId() from bot-identity-resolvers.ts (cached, no raw fetch) - Clean up hasMention(): remove dead code branch and fix inconsistent JSDoc Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Cascade Bot Co-authored-by: Claude Opus 4.6 --- src/router/reactions.ts | 10 +- src/triggers/builtins.ts | 2 + src/triggers/jira/types.ts | 16 +-- src/triggers/linear/comment-mention.ts | 134 +++++++++++++++++++++++++ src/triggers/linear/index.ts | 11 ++ src/triggers/linear/label-added.ts | 130 ++++++++++++++++++++++++ src/triggers/linear/register.ts | 29 ++++++ src/triggers/linear/status-changed.ts | 108 ++++++++++++++++++++ src/triggers/linear/types.ts | 80 +++++++++++++++ src/triggers/linear/webhook-handler.ts | 21 ++++ src/triggers/shared/status-to-agent.ts | 17 ++++ src/worker-entry.ts | 31 ++++++ tests/unit/triggers/builtins.test.ts | 29 +++++- 13 files changed, 600 insertions(+), 18 deletions(-) create mode 100644 src/triggers/linear/comment-mention.ts create mode 100644 src/triggers/linear/index.ts create mode 100644 src/triggers/linear/label-added.ts create mode 100644 src/triggers/linear/register.ts create mode 100644 src/triggers/linear/status-changed.ts create mode 100644 src/triggers/linear/types.ts create mode 100644 src/triggers/linear/webhook-handler.ts create mode 100644 src/triggers/shared/status-to-agent.ts diff --git a/src/router/reactions.ts b/src/router/reactions.ts index 010f844d..23a1d8df 100644 --- a/src/router/reactions.ts +++ b/src/router/reactions.ts @@ -160,13 +160,19 @@ async function sendJiraReaction(projectId: string, payload: unknown): Promise { + // Linear 
does not support emoji reactions on comments via the same API pattern + // as Trello/JIRA. This is a no-op placeholder for API consistency. + logger.info('[Reactions] Linear reaction skipped (not supported via webhook API)'); +} + // --------------------------------------------------------------------------- // Main entry point // --------------------------------------------------------------------------- /** * Send an acknowledgment reaction for an incoming webhook. - * Dispatches to Trello (👀), GitHub (👀), or JIRA (💭) based on source. + * Dispatches to Trello (👀), GitHub (👀), JIRA (💭), or Linear (no-op) based on source. * * For GitHub, pass `repoFullName` as the `projectId` parameter, along with * `personaIdentities` and the already-resolved `project`. The reaction is @@ -189,6 +195,8 @@ export async function sendAcknowledgeReaction( await sendGitHubReaction(projectId, payload, personaIdentities, project); } else if (source === 'jira') { await sendJiraReaction(projectId, payload); + } else if (source === 'linear') { + await sendLinearReaction(projectId, payload); } } catch (err) { logger.error('[Reactions] Unexpected error sending reaction:', String(err)); diff --git a/src/triggers/builtins.ts b/src/triggers/builtins.ts index 80aefcb6..353060b9 100644 --- a/src/triggers/builtins.ts +++ b/src/triggers/builtins.ts @@ -20,6 +20,7 @@ import { registerGitHubTriggers } from './github/register.js'; import { registerJiraTriggers } from './jira/register.js'; +import { registerLinearTriggers } from './linear/register.js'; import type { TriggerRegistry } from './registry.js'; import { registerSentryTriggers } from './sentry/register.js'; import { registerTrelloTriggers } from './trello/register.js'; @@ -27,6 +28,7 @@ import { registerTrelloTriggers } from './trello/register.js'; export function registerBuiltInTriggers(registry: TriggerRegistry): void { registerTrelloTriggers(registry); registerJiraTriggers(registry); + registerLinearTriggers(registry); 
registerGitHubTriggers(registry); registerSentryTriggers(registry); } diff --git a/src/triggers/jira/types.ts b/src/triggers/jira/types.ts index 640ff4ea..37941c66 100644 --- a/src/triggers/jira/types.ts +++ b/src/triggers/jira/types.ts @@ -35,18 +35,4 @@ export interface JiraWebhookPayload { // Constants // --------------------------------------------------------------------------- -/** - * Maps CASCADE status keys to agent types. - * - * Project config maps CASCADE status names to JIRA status names, e.g.: - * { splitting: "Splitting", planning: "Planning", todo: "To Do" } - * - * We invert that mapping at runtime: if the issue transitioned to "Splitting", - * we look up `splitting` → `splitting` agent. - */ -export const STATUS_TO_AGENT: Record = { - splitting: 'splitting', - planning: 'planning', - todo: 'implementation', - backlog: 'backlog-manager', -}; +export { STATUS_TO_AGENT } from '../shared/status-to-agent.js'; diff --git a/src/triggers/linear/comment-mention.ts b/src/triggers/linear/comment-mention.ts new file mode 100644 index 00000000..56d1bce8 --- /dev/null +++ b/src/triggers/linear/comment-mention.ts @@ -0,0 +1,134 @@ +/** + * Linear comment @mention trigger. + * + * Fires when someone @mentions the CASCADE bot user in a Linear issue comment. + * Runs the respond-to-planning-comment agent. + * + * Linear webhook structure for comment creation: + * action: 'create', type: 'Comment' + * data.body: the comment text (plain markdown) + * data.userId: the author's user ID + * data.issueId: the issue ID + * data.issue.identifier: the issue identifier (e.g. 
TEAM-123) + */ + +import { resolveLinearBotUserId } from '../../router/bot-identity-resolvers.js'; +import type { TriggerContext, TriggerHandler, TriggerResult } from '../../types/index.js'; +import { logger } from '../../utils/logging.js'; +import { checkTriggerEnabled } from '../shared/trigger-check.js'; +import type { LinearWebhookCommentTriggerData, LinearWebhookTriggerPayload } from './types.js'; + +/** + * Check if a Linear comment body contains an @mention for the given user ID. + * Linear uses @[Display Name](userId) markdown mention syntax, where userId is + * a UUID. Checking for userId as a substring is sufficient and safe in practice. + */ +function hasMention(body: string, userId: string): boolean { + return body.includes(userId); +} + +export class LinearCommentMentionTrigger implements TriggerHandler { + name = 'linear-comment-mention'; + description = + 'Triggers respond-to-planning-comment agent when someone @mentions the bot in a Linear comment'; + + matches(ctx: TriggerContext): boolean { + if (ctx.source !== 'linear') return false; + + const payload = ctx.payload as LinearWebhookTriggerPayload; + return payload.action === 'create' && payload.type === 'Comment'; + } + + async handle(ctx: TriggerContext): Promise { + // Check trigger config via DB-driven system + if ( + !(await checkTriggerEnabled( + ctx.project.id, + 'respond-to-planning-comment', + 'pm:comment-mention', + this.name, + )) + ) { + return null; + } + + const payload = ctx.payload as LinearWebhookTriggerPayload; + const data = payload.data as LinearWebhookCommentTriggerData; + + const commentBody = data.body; + const commentAuthorId = data.userId; + const issue = data.issue; + const issueIdentifier = issue?.identifier ?? issue?.id; + const issueId = issue?.id ?? data.issueId; + + logger.info('Linear comment trigger processing', { + issueIdentifier: issueIdentifier ?? '', + hasCommentBody: !!commentBody, + commentAuthorId: commentAuthorId ?? 
'', + }); + + if (!issueIdentifier || !commentBody) { + logger.info('Linear comment trigger: missing issueIdentifier or commentBody, skipping', { + hasIssueIdentifier: !!issueIdentifier, + hasCommentBody: !!commentBody, + }); + return null; + } + + // Resolve the bot's Linear user ID via the shared cached resolver + const botUserId = await resolveLinearBotUserId(ctx.project.id); + + if (!botUserId) { + logger.warn('Linear comment trigger: could not resolve bot user ID, skipping', { + projectId: ctx.project.id, + }); + return null; + } + + logger.info('Linear bot identity resolved', { botUserId }); + + // Skip self-authored comments to prevent infinite loops + if (commentAuthorId === botUserId) { + logger.info('Skipping self-authored Linear comment to prevent infinite loop', { + issueIdentifier, + botUserId, + }); + return null; + } + + // Check for bot @mention in comment body + const mentionFound = hasMention(commentBody, botUserId); + if (!mentionFound) { + logger.info('Linear comment trigger: no @mention of bot found in comment body', { + issueIdentifier, + botUserId, + bodyPreview: commentBody.length > 200 ? `${commentBody.slice(0, 200)}...` : commentBody, + }); + return null; + } + + const issueUrl = issue?.url; + + logger.info('Linear comment @mention detected, triggering agent', { + issueIdentifier, + commentAuthorId, + botUserId, + }); + + return { + agentType: 'respond-to-planning-comment', + agentInput: { + workItemId: issueIdentifier, + triggerCommentText: commentBody, + triggerCommentAuthor: commentAuthorId, + workItemUrl: issueUrl, + workItemTitle: undefined, + triggerEvent: 'pm:comment-mention', + linearIssueId: issueId, + }, + workItemId: issueIdentifier, + workItemUrl: issueUrl, + workItemTitle: undefined, + }; + } +} diff --git a/src/triggers/linear/index.ts b/src/triggers/linear/index.ts new file mode 100644 index 00000000..14ad3d30 --- /dev/null +++ b/src/triggers/linear/index.ts @@ -0,0 +1,11 @@ +/** + * Linear trigger barrel. 
+ * + * For trigger registration use `registerLinearTriggers` from `./register.js`. + */ + +export { LinearCommentMentionTrigger } from './comment-mention.js'; +export { LinearReadyToProcessLabelTrigger } from './label-added.js'; +export { registerLinearTriggers } from './register.js'; +export { LinearStatusChangedTrigger } from './status-changed.js'; +export { processLinearWebhook } from './webhook-handler.js'; diff --git a/src/triggers/linear/label-added.ts b/src/triggers/linear/label-added.ts new file mode 100644 index 00000000..adb18e3f --- /dev/null +++ b/src/triggers/linear/label-added.ts @@ -0,0 +1,130 @@ +/** + * Linear "Ready to Process" label trigger. + * + * Fires when an IssueLabel is created (action=create, type=IssueLabel) + * matching the configured readyToProcess label. Determines which agent to run + * based on the issue's current state, using the same state→agent mapping as + * the status-changed trigger. + * + * Linear webhook structure for label additions: + * action: 'create', type: 'IssueLabel' + * data.labelId: the added label ID + * data.label.name: the label name + * data.issue.stateId: current state ID of the issue + */ + +import { getLinearConfig } from '../../pm/config.js'; +import { resolveProjectPMConfig } from '../../pm/lifecycle.js'; +import type { TriggerContext, TriggerHandler, TriggerResult } from '../../types/index.js'; +import { logger } from '../../utils/logging.js'; +import { checkTriggerEnabled } from '../shared/trigger-check.js'; +import { + type LinearWebhookIssueLabelData, + type LinearWebhookTriggerPayload, + STATUS_TO_AGENT, +} from './types.js'; + +export class LinearReadyToProcessLabelTrigger implements TriggerHandler { + name = 'linear-ready-to-process-label-added'; + description = 'Triggers agent based on current state when "Ready to Process" label is added'; + + matches(ctx: TriggerContext): boolean { + if (ctx.source !== 'linear') return false; + + const payload = ctx.payload as LinearWebhookTriggerPayload; + if 
(payload.action !== 'create' || payload.type !== 'IssueLabel') return false; + + // Check that the configured readyToProcess label was actually added + const pmConfig = resolveProjectPMConfig(ctx.project); + const readyLabel = pmConfig.labels.readyToProcess; + if (!readyLabel) return false; + + const data = payload.data as LinearWebhookIssueLabelData; + const labelName = data.label?.name; + if (!labelName) return false; + + return labelName === readyLabel || data.labelId === readyLabel; + } + + async handle(ctx: TriggerContext): Promise { + const payload = ctx.payload as LinearWebhookTriggerPayload; + const data = payload.data as LinearWebhookIssueLabelData; + + const issue = data.issue; + const issueIdentifier = issue?.identifier ?? issue?.id; + const issueId = issue?.id; + const issueUrl = issue?.url; + const issueStateId = issue?.stateId; + + if (!issueIdentifier) { + logger.debug('Linear label trigger: missing issue identifier, skipping'); + return null; + } + + if (!issueStateId) { + logger.debug('No state ID on Linear issue, cannot determine agent type', { + issueIdentifier, + }); + return null; + } + + const linearConfig = getLinearConfig(ctx.project); + if (!linearConfig?.statuses) { + logger.debug('No Linear status configuration, skipping label trigger', { + projectId: ctx.project.id, + }); + return null; + } + + // Find which CASCADE status key maps to this Linear state ID + let agentType: string | undefined; + let matchedCascadeStatus: string | undefined; + for (const [cascadeStatus, linearStateId] of Object.entries(linearConfig.statuses)) { + if (linearStateId === issueStateId) { + agentType = STATUS_TO_AGENT[cascadeStatus]; + matchedCascadeStatus = cascadeStatus; + break; + } + } + + if (!agentType) { + logger.debug('Linear issue state does not map to any agent', { + issueIdentifier, + issueStateId, + configuredStatuses: linearConfig.statuses, + }); + return null; + } + + // Check per-agent ready-to-process toggle via DB-driven system + if (!(await 
checkTriggerEnabled(ctx.project.id, agentType, 'pm:label-added', this.name))) { + return null; + } + + logger.info('Linear "Ready to Process" label added, triggering agent', { + issueIdentifier, + issueStateId, + cascadeStatus: matchedCascadeStatus, + agentType, + }); + + const workItemId = issueIdentifier; + const workItemUrl = issueUrl; + // Issue title is not included in IssueLabel webhook data + const workItemTitle: string | undefined = undefined; + + return { + agentType, + agentInput: { + workItemId, + workItemUrl, + workItemTitle, + triggerEvent: 'pm:label-added', + linearIssueId: issueId, + }, + workItemId, + workItemUrl, + workItemTitle, + }; + } +} diff --git a/src/triggers/linear/register.ts b/src/triggers/linear/register.ts new file mode 100644 index 00000000..5f347fab --- /dev/null +++ b/src/triggers/linear/register.ts @@ -0,0 +1,29 @@ +/** + * Linear trigger registration. + * + * This module only imports trigger handler classes (no webhook handlers, + * no agent execution pipeline) so it is safe to import from the router. + * + * `registerLinearTriggers` is the single call-site for wiring all built-in + * Linear triggers into a registry. Adding a new Linear trigger only + * requires updating this file, not `builtins.ts`. + */ + +import type { TriggerRegistry } from '../registry.js'; +import { LinearCommentMentionTrigger } from './comment-mention.js'; +import { LinearReadyToProcessLabelTrigger } from './label-added.js'; +import { LinearStatusChangedTrigger } from './status-changed.js'; + +/** + * Register all built-in Linear triggers into the given registry. + * + * Order matters: LinearCommentMentionTrigger must be registered before + * the status-changed trigger so it gets first crack at comment events. 
+ */ +export function registerLinearTriggers(registry: TriggerRegistry): void { + // Must be registered before status-changed trigger + registry.register(new LinearCommentMentionTrigger()); + + registry.register(new LinearStatusChangedTrigger()); + registry.register(new LinearReadyToProcessLabelTrigger()); +} diff --git a/src/triggers/linear/status-changed.ts b/src/triggers/linear/status-changed.ts new file mode 100644 index 00000000..c9b8aa5f --- /dev/null +++ b/src/triggers/linear/status-changed.ts @@ -0,0 +1,108 @@ +/** + * Linear status-changed trigger. + * + * Fires when a Linear issue transitions to a configured state (by state ID) + * that maps to a CASCADE agent type (splitting, planning, implementation). + * + * Linear webhook structure for status changes: + * action: 'update', type: 'Issue' + * data.stateId: new state ID + * updatedFrom.stateId: previous state ID (only present when stateId changed) + */ + +import { getLinearConfig } from '../../pm/config.js'; +import type { TriggerContext, TriggerHandler, TriggerResult } from '../../types/index.js'; +import { logger } from '../../utils/logging.js'; +import { checkTriggerEnabled } from '../shared/trigger-check.js'; +import { type LinearWebhookTriggerPayload, STATUS_TO_AGENT } from './types.js'; + +export class LinearStatusChangedTrigger implements TriggerHandler { + name = 'linear-status-changed'; + description = 'Triggers agent when a Linear issue transitions to a configured state'; + + matches(ctx: TriggerContext): boolean { + if (ctx.source !== 'linear') return false; + + const payload = ctx.payload as LinearWebhookTriggerPayload; + if (payload.action !== 'update' || payload.type !== 'Issue') return false; + + // Must have a state change indicated by updatedFrom.stateId + return typeof payload.updatedFrom?.stateId === 'string'; + } + + async handle(ctx: TriggerContext): Promise { + const payload = ctx.payload as LinearWebhookTriggerPayload; + const data = payload.data as Record; + + const newStateId = 
data.stateId as string | undefined; + const issueIdentifier = + (data.identifier as string | undefined) ?? (data.id as string | undefined); + const issueId = data.id as string | undefined; + const issueTitle = data.title as string | undefined; + const issueUrl = data.url as string | undefined; + + if (!newStateId || !issueIdentifier) { + return null; + } + + const linearConfig = getLinearConfig(ctx.project); + if (!linearConfig?.statuses) { + logger.debug('No Linear status configuration, skipping status-changed trigger', { + projectId: ctx.project.id, + }); + return null; + } + + // Find which CASCADE status key maps to this Linear state ID + let agentType: string | undefined; + let matchedCascadeStatus: string | undefined; + for (const [cascadeStatus, linearStateId] of Object.entries(linearConfig.statuses)) { + if (linearStateId === newStateId) { + agentType = STATUS_TO_AGENT[cascadeStatus]; + matchedCascadeStatus = cascadeStatus; + break; + } + } + + if (!agentType) { + logger.debug('Linear state transition does not map to any agent', { + issueIdentifier, + newStateId, + configuredStatuses: linearConfig.statuses, + }); + return null; + } + + // Check per-agent toggle for statusChanged via DB-driven system + if (!(await checkTriggerEnabled(ctx.project.id, agentType, 'pm:status-changed', this.name))) { + return null; + } + + logger.info('Linear issue transitioned to agent-triggering state', { + issueIdentifier, + previousStateId: payload.updatedFrom?.stateId, + newStateId, + cascadeStatus: matchedCascadeStatus, + agentType, + }); + + // Use issueIdentifier (e.g. 
TEAM-123) as the workItemId, falling back to id + const workItemId = issueIdentifier; + const workItemUrl = issueUrl; + const workItemTitle = issueTitle; + + return { + agentType, + agentInput: { + workItemId, + workItemUrl, + workItemTitle, + triggerEvent: 'pm:status-changed', + linearIssueId: issueId, + }, + workItemId, + workItemUrl, + workItemTitle, + }; + } +} diff --git a/src/triggers/linear/types.ts b/src/triggers/linear/types.ts new file mode 100644 index 00000000..718ed812 --- /dev/null +++ b/src/triggers/linear/types.ts @@ -0,0 +1,80 @@ +/** + * Shared Linear webhook types and constants used across Linear trigger handlers. + */ + +// --------------------------------------------------------------------------- +// Webhook Payload +// --------------------------------------------------------------------------- + +export interface LinearWebhookIssueTriggerData { + id: string; + identifier: string; + title: string; + description?: string | null; + priority: number; + priorityLabel: string; + url: string; + teamId: string; + stateId: string; + assigneeId?: string | null; + labelIds: string[]; + createdAt: string; + updatedAt: string; +} + +export interface LinearWebhookCommentTriggerData { + id: string; + body: string; + issueId: string; + userId: string; + createdAt: string; + updatedAt: string; + issue?: { + id: string; + identifier: string; + title: string; + teamId: string; + url: string; + stateId: string; + }; +} + +export interface LinearWebhookIssueLabelData { + id: string; + issueId: string; + labelId: string; + label?: { + id: string; + name: string; + }; + issue?: { + id: string; + identifier: string; + title: string; + teamId: string; + url: string; + stateId: string; + }; + teamId?: string; +} + +export interface LinearWebhookTriggerPayload { + action: 'create' | 'update' | 'remove'; + type: 'Issue' | 'Comment' | 'IssueLabel' | 'Reaction'; + organizationId: string; + webhookTimestamp: number; + data: + | LinearWebhookIssueTriggerData + | 
LinearWebhookCommentTriggerData
+    | LinearWebhookIssueLabelData
+    | Record<string, unknown>;
+  url: string;
+  /** Present on update events — contains the previous values of changed fields */
+  updatedFrom?: Record<string, unknown>;
+}
+
+// ---------------------------------------------------------------------------
+// Constants
+// ---------------------------------------------------------------------------
+
+export { STATUS_TO_AGENT } from '../shared/status-to-agent.js';
diff --git a/src/triggers/linear/webhook-handler.ts b/src/triggers/linear/webhook-handler.ts
new file mode 100644
index 00000000..a6dadf33
--- /dev/null
+++ b/src/triggers/linear/webhook-handler.ts
@@ -0,0 +1,21 @@
+/**
+ * Linear webhook handler.
+ *
+ * Thin wrapper around the generic PM webhook processor.
+ * Resolves the Linear integration from the registry and delegates.
+ */
+
+import { pmRegistry } from '../../pm/index.js';
+import { processPMWebhook } from '../../pm/webhook-handler.js';
+import type { TriggerResult } from '../../types/index.js';
+import type { TriggerRegistry } from '../registry.js';
+
+export async function processLinearWebhook(
+  payload: unknown,
+  registry: TriggerRegistry,
+  ackCommentId?: string,
+  triggerResult?: TriggerResult,
+): Promise<void> {
+  const integration = pmRegistry.get('linear');
+  await processPMWebhook(integration, payload, registry, ackCommentId, triggerResult);
+}
diff --git a/src/triggers/shared/status-to-agent.ts b/src/triggers/shared/status-to-agent.ts
new file mode 100644
index 00000000..71f66932
--- /dev/null
+++ b/src/triggers/shared/status-to-agent.ts
@@ -0,0 +1,17 @@
+/**
+ * Shared status-to-agent mapping used across PM trigger handlers (JIRA, Linear, etc.).
+ *
+ * Maps CASCADE status keys to agent types. 
+ *
+ * Project config maps CASCADE status names to platform-specific status/state
+ * names, e.g.: { splitting: "Splitting", planning: "Planning", todo: "To Do" }
+ *
+ * We invert that mapping at runtime: if the issue transitioned to "Splitting",
+ * we look up `splitting` → `splitting` agent.
+ */
+export const STATUS_TO_AGENT: Record<string, string> = {
+  splitting: 'splitting',
+  planning: 'planning',
+  todo: 'implementation',
+  backlog: 'backlog-manager',
+};
diff --git a/src/worker-entry.ts b/src/worker-entry.ts
index 2072559e..ac4d067e 100644
--- a/src/worker-entry.ts
+++ b/src/worker-entry.ts
@@ -27,6 +27,7 @@ import {
   registerBuiltInTriggers,
   type TriggerRegistry,
 } from './triggers/index.js';
+import { processLinearWebhook } from './triggers/linear/webhook-handler.js';
 import { processSentryWebhook } from './triggers/sentry/webhook-handler.js';
 import { processTrelloWebhook } from './triggers/trello/webhook-handler.js';
 import type { TriggerResult } from './types/index.js';
@@ -80,6 +81,19 @@ export interface SentryJobData {
   triggerResult?: TriggerResult;
 }
 
+export interface LinearJobData {
+  type: 'linear';
+  source: 'linear';
+  payload: unknown;
+  projectId: string;
+  workItemId?: string;
+  /** Linear event type: e.g. 
'create/Issue', 'update/Issue', 'create/Comment' */ + eventType: string; + receivedAt: string; + ackCommentId?: string; + triggerResult?: TriggerResult; +} + export interface ManualRunJobData { type: 'manual-run'; projectId: string; @@ -113,6 +127,7 @@ export type JobData = | GitHubJobData | JiraJobData | SentryJobData + | LinearJobData | DashboardJobData; export async function processDashboardJob(jobId: string, jobData: DashboardJobData): Promise { @@ -226,6 +241,22 @@ export async function dispatchJob( jobData.triggerResult, ); break; + case 'linear': + logger.info('[Worker] Processing Linear job', { + jobId, + projectId: jobData.projectId, + workItemId: jobData.workItemId, + eventType: jobData.eventType, + ackCommentId: jobData.ackCommentId, + hasTriggerResult: !!jobData.triggerResult, + }); + await processLinearWebhook( + jobData.payload, + triggerRegistry, + jobData.ackCommentId, + jobData.triggerResult, + ); + break; case 'manual-run': case 'retry-run': case 'debug-analysis': diff --git a/tests/unit/triggers/builtins.test.ts b/tests/unit/triggers/builtins.test.ts index 3425a031..c915c3ce 100644 --- a/tests/unit/triggers/builtins.test.ts +++ b/tests/unit/triggers/builtins.test.ts @@ -62,6 +62,20 @@ vi.mock('../../../src/triggers/sentry/alerting-metric.js', () => ({ SentryMetricAlertTrigger: vi.fn().mockImplementation(() => ({ name: 'sentry-metric-alert' })), })); +vi.mock('../../../src/triggers/linear/comment-mention.js', () => ({ + LinearCommentMentionTrigger: vi + .fn() + .mockImplementation(() => ({ name: 'linear-comment-mention' })), +})); +vi.mock('../../../src/triggers/linear/status-changed.js', () => ({ + LinearStatusChangedTrigger: vi.fn().mockImplementation(() => ({ name: 'linear-status-changed' })), +})); +vi.mock('../../../src/triggers/linear/label-added.js', () => ({ + LinearReadyToProcessLabelTrigger: vi + .fn() + .mockImplementation(() => ({ name: 'linear-ready-to-process-label-added' })), +})); + vi.mock('../../../src/utils/logging.js', () => ({ 
logger: { debug: vi.fn(), @@ -88,8 +102,8 @@ describe('registerBuiltInTriggers', () => { registerBuiltInTriggers(registry as unknown as TriggerRegistry); - // Should have registered all 21 built-in triggers (19 + 2 Sentry alerting triggers) - expect(registry.register).toHaveBeenCalledTimes(21); + // Should have registered all 24 built-in triggers (19 + 2 Sentry alerting + 3 Linear triggers) + expect(registry.register).toHaveBeenCalledTimes(24); }); it('registers TrelloCommentMentionTrigger first', () => { @@ -142,6 +156,17 @@ describe('registerBuiltInTriggers', () => { expect(registeredNames).toContain('jira-label-added'); }); + it('registers Linear triggers', () => { + const registry = createMockRegistry(); + + registerBuiltInTriggers(registry as unknown as TriggerRegistry); + + const registeredNames = registry.handlers.map((h: object) => (h as { name: string }).name); + expect(registeredNames).toContain('linear-comment-mention'); + expect(registeredNames).toContain('linear-status-changed'); + expect(registeredNames).toContain('linear-ready-to-process-label-added'); + }); + it('registers Sentry alerting triggers', () => { const registry = createMockRegistry(); From ce958e5f9794f32833ed3ac0042c1d6d831b21c8 Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 22:53:00 +0200 Subject: [PATCH 31/52] test(linear): add unit tests for Linear trigger handlers (#1101) Co-authored-by: Cascade Bot --- .../triggers/linear-comment-mention.test.ts | 234 ++++++++++++++ .../unit/triggers/linear-label-added.test.ts | 298 ++++++++++++++++++ .../triggers/linear-status-changed.test.ts | 256 +++++++++++++++ 3 files changed, 788 insertions(+) create mode 100644 tests/unit/triggers/linear-comment-mention.test.ts create mode 100644 tests/unit/triggers/linear-label-added.test.ts create mode 100644 tests/unit/triggers/linear-status-changed.test.ts diff --git a/tests/unit/triggers/linear-comment-mention.test.ts b/tests/unit/triggers/linear-comment-mention.test.ts new file mode 100644 
index 00000000..c8a80c71 --- /dev/null +++ b/tests/unit/triggers/linear-comment-mention.test.ts @@ -0,0 +1,234 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { mockLogger, mockTriggerCheckModule } from '../../helpers/sharedMocks.js'; + +vi.mock('../../../src/utils/logging.js', () => ({ logger: mockLogger })); +vi.mock('../../../src/triggers/shared/trigger-check.js', () => mockTriggerCheckModule); + +// Mock resolveLinearBotUserId to avoid real API calls +const mockResolveLinearBotUserId = vi.fn(); +vi.mock('../../../src/router/bot-identity-resolvers.js', () => ({ + resolveLinearBotUserId: (...args: unknown[]) => mockResolveLinearBotUserId(...args), +})); + +import { LinearCommentMentionTrigger } from '../../../src/triggers/linear/comment-mention.js'; +import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; +import type { TriggerContext } from '../../../src/types/index.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +const BOT_USER_ID = 'bot-user-uuid-001'; +const OTHER_USER_ID = 'user-other-uuid-456'; +const ISSUE_IDENTIFIER = 'TEAM-99'; +const ISSUE_ID = 'issue-uuid-99'; + +const mockProject = { + id: 'proj-linear', + orgId: 'org-1', + name: 'Linear Project', + repo: 'owner/repo', + baseBranch: 'main', + branchPrefix: 'feature/', + pm: { type: 'linear' as const }, +} as TriggerContext['project']; + +function buildCtx( + overrides: { + source?: TriggerContext['source']; + action?: string; + type?: string; + commentBody?: string; + commentAuthorId?: string; + issueIdentifier?: string; + issueId?: string; + issueUrl?: string; + noIssue?: boolean; + } = {}, +): TriggerContext { + const issue = overrides.noIssue + ? undefined + : { + id: overrides.issueId ?? ISSUE_ID, + identifier: overrides.issueIdentifier ?? 
ISSUE_IDENTIFIER, + title: 'Test issue', + teamId: 'team-abc', + url: overrides.issueUrl ?? 'https://linear.app/org/issue/TEAM-99', + stateId: 'state-todo', + }; + + return { + project: mockProject, + source: overrides.source ?? 'linear', + payload: { + action: overrides.action ?? 'create', + type: overrides.type ?? 'Comment', + organizationId: 'org-123', + webhookTimestamp: Date.now(), + data: { + id: 'comment-uuid', + // Include botUserId in the body to simulate an @mention + body: overrides.commentBody ?? `@[Bot User](${BOT_USER_ID}) please help with this issue`, + issueId: ISSUE_ID, + userId: overrides.commentAuthorId ?? OTHER_USER_ID, + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + issue, + }, + url: 'https://linear.app', + }, + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('LinearCommentMentionTrigger', () => { + let trigger: LinearCommentMentionTrigger; + + beforeEach(() => { + vi.resetAllMocks(); + vi.mocked(checkTriggerEnabled).mockResolvedValue(true); + mockResolveLinearBotUserId.mockResolvedValue(BOT_USER_ID); + trigger = new LinearCommentMentionTrigger(); + }); + + // ========================================================================= + // matches + // ========================================================================= + describe('matches', () => { + it('matches create/Comment events from linear source', () => { + expect(trigger.matches(buildCtx())).toBe(true); + }); + + it('does not match non-linear source', () => { + expect(trigger.matches(buildCtx({ source: 'jira' }))).toBe(false); + }); + + it('does not match non-create actions', () => { + expect(trigger.matches(buildCtx({ action: 'update' }))).toBe(false); + }); + + it('does not match non-Comment types', () => { + expect(trigger.matches(buildCtx({ type: 'Issue' }))).toBe(false); + }); + + it('does not match IssueLabel 
type', () => { + expect(trigger.matches(buildCtx({ type: 'IssueLabel' }))).toBe(false); + }); + }); + + // ========================================================================= + // handle + // ========================================================================= + describe('handle', () => { + it('returns respond-to-planning-comment result when @mention found', async () => { + const result = await trigger.handle(buildCtx()); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('respond-to-planning-comment'); + expect(result?.workItemId).toBe(ISSUE_IDENTIFIER); + expect(result?.workItemUrl).toBe('https://linear.app/org/issue/TEAM-99'); + expect(result?.agentInput.workItemId).toBe(ISSUE_IDENTIFIER); + expect(result?.agentInput.triggerEvent).toBe('pm:comment-mention'); + }); + + it('includes triggerCommentText in agentInput', async () => { + const body = `@[Bot](${BOT_USER_ID}) please implement feature X`; + const result = await trigger.handle(buildCtx({ commentBody: body })); + + expect(result?.agentInput.triggerCommentText).toBe(body); + }); + + it('includes commentAuthorId in agentInput', async () => { + const result = await trigger.handle(buildCtx({ commentAuthorId: OTHER_USER_ID })); + + expect(result?.agentInput.triggerCommentAuthor).toBe(OTHER_USER_ID); + }); + + it('returns null when trigger is disabled', async () => { + vi.mocked(checkTriggerEnabled).mockResolvedValue(false); + + const result = await trigger.handle(buildCtx()); + + expect(result).toBeNull(); + expect(checkTriggerEnabled).toHaveBeenCalledWith( + 'proj-linear', + 'respond-to-planning-comment', + 'pm:comment-mention', + 'linear-comment-mention', + ); + }); + + it('returns null when issueIdentifier is missing', async () => { + const result = await trigger.handle(buildCtx({ noIssue: true })); + expect(result).toBeNull(); + }); + + it('returns null when commentBody is missing', async () => { + const ctx = buildCtx({ commentBody: '' }); + const data = ctx.payload as Record; + 
(data.data as Record).body = ''; + const result = await trigger.handle(ctx); + expect(result).toBeNull(); + }); + + it('returns null when bot userId cannot be resolved', async () => { + mockResolveLinearBotUserId.mockResolvedValue(null); + + const result = await trigger.handle(buildCtx()); + + expect(result).toBeNull(); + }); + + it('returns null when comment is self-authored by the bot', async () => { + // Comment author is the bot itself + const result = await trigger.handle(buildCtx({ commentAuthorId: BOT_USER_ID })); + expect(result).toBeNull(); + }); + + it('returns null when comment body does not @mention the bot', async () => { + // No bot userId in the body + const result = await trigger.handle( + buildCtx({ commentBody: 'Just a regular comment, no mention' }), + ); + expect(result).toBeNull(); + }); + + it('includes linearIssueId in agentInput', async () => { + const result = await trigger.handle(buildCtx({ issueId: 'issue-uuid-99' })); + + expect(result?.agentInput.linearIssueId).toBe('issue-uuid-99'); + }); + + it('uses issue.id as fallback when identifier is missing', async () => { + const ctx = buildCtx(); + const data = ctx.payload as Record; + (data.data as Record).issue = { + id: 'fallback-issue-id', + // no identifier + url: 'https://linear.app/org/issue/fallback', + }; + const result = await trigger.handle(ctx); + expect(result?.workItemId).toBe('fallback-issue-id'); + }); + + it('resolves botUserId using the project ID', async () => { + await trigger.handle(buildCtx()); + + expect(mockResolveLinearBotUserId).toHaveBeenCalledWith('proj-linear'); + }); + + it('workItemTitle is undefined (not available in comment webhook)', async () => { + const result = await trigger.handle(buildCtx()); + expect(result?.workItemTitle).toBeUndefined(); + }); + + it('uses issueId from data.issueId when issue is present in data', async () => { + const ctx = buildCtx({ issueId: 'issue-uuid-99' }); + const result = await trigger.handle(ctx); + 
expect(result?.agentInput.linearIssueId).toBe('issue-uuid-99'); + }); + }); +}); diff --git a/tests/unit/triggers/linear-label-added.test.ts b/tests/unit/triggers/linear-label-added.test.ts new file mode 100644 index 00000000..6427aefb --- /dev/null +++ b/tests/unit/triggers/linear-label-added.test.ts @@ -0,0 +1,298 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { mockLogger, mockTriggerCheckModule } from '../../helpers/sharedMocks.js'; + +vi.mock('../../../src/utils/logging.js', () => ({ logger: mockLogger })); +vi.mock('../../../src/triggers/shared/trigger-check.js', () => mockTriggerCheckModule); + +const mockGetLinearConfig = vi.fn(); +vi.mock('../../../src/pm/config.js', () => ({ + getLinearConfig: (...args: unknown[]) => mockGetLinearConfig(...args), +})); + +// Mock resolveProjectPMConfig to avoid pmRegistry bootstrap side effects +const mockResolveProjectPMConfig = vi.fn(); +vi.mock('../../../src/pm/lifecycle.js', () => ({ + resolveProjectPMConfig: (...args: unknown[]) => mockResolveProjectPMConfig(...args), +})); + +import { LinearReadyToProcessLabelTrigger } from '../../../src/triggers/linear/label-added.js'; +import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; +import type { TriggerContext } from '../../../src/types/index.js'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +const baseLinearConfig = { + teamId: 'team-abc', + statuses: { + splitting: 'state-splitting', + planning: 'state-planning', + todo: 'state-todo', + backlog: 'state-backlog', + done: 'state-done', + }, +}; + +const baseProjectPMConfig = { + labels: { + processing: 'cascade-processing', + processed: 'cascade-processed', + error: 'cascade-error', + readyToProcess: 'cascade-ready', + auto: 'cascade-auto', + }, + statuses: { + backlog: 'state-backlog', + inProgress: 'state-in-progress', + done: 
'state-done', + }, +}; + +const mockProject = { + id: 'proj-linear', + orgId: 'org-1', + name: 'Linear Project', + repo: 'owner/repo', + baseBranch: 'main', + branchPrefix: 'feature/', + pm: { type: 'linear' as const }, +} as TriggerContext['project']; + +function buildCtx( + overrides: { + source?: TriggerContext['source']; + action?: string; + type?: string; + labelName?: string; + labelId?: string; + issueStateId?: string; + issueIdentifier?: string; + issueId?: string; + issueUrl?: string; + readyToProcessLabel?: string; + noLinearConfig?: boolean; + } = {}, +): TriggerContext { + return { + project: mockProject, + source: overrides.source ?? 'linear', + payload: { + action: overrides.action ?? 'create', + type: overrides.type ?? 'IssueLabel', + organizationId: 'org-123', + webhookTimestamp: Date.now(), + data: { + id: 'issuelabel-uuid', + issueId: 'issue-uuid', + labelId: overrides.labelId ?? 'label-cascade-ready', + label: { + id: overrides.labelId ?? 'label-cascade-ready', + name: overrides.labelName ?? 'cascade-ready', + }, + issue: { + id: overrides.issueId ?? 'issue-uuid', + identifier: overrides.issueIdentifier ?? 'TEAM-123', + title: 'Fix the bug', + teamId: 'team-abc', + url: overrides.issueUrl ?? 'https://linear.app/org/issue/TEAM-123', + stateId: overrides.issueStateId ?? 
'state-todo', + }, + }, + url: 'https://linear.app', + }, + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('LinearReadyToProcessLabelTrigger', () => { + let trigger: LinearReadyToProcessLabelTrigger; + + beforeEach(() => { + vi.resetAllMocks(); + vi.mocked(checkTriggerEnabled).mockResolvedValue(true); + mockGetLinearConfig.mockReturnValue(baseLinearConfig); + mockResolveProjectPMConfig.mockReturnValue(baseProjectPMConfig); + trigger = new LinearReadyToProcessLabelTrigger(); + }); + + // ========================================================================= + // matches + // ========================================================================= + describe('matches', () => { + it('matches create/IssueLabel events with the ready-to-process label', () => { + expect(trigger.matches(buildCtx())).toBe(true); + }); + + it('does not match non-linear source', () => { + expect(trigger.matches(buildCtx({ source: 'jira' }))).toBe(false); + }); + + it('does not match non-create actions', () => { + expect(trigger.matches(buildCtx({ action: 'update' }))).toBe(false); + }); + + it('does not match non-IssueLabel types', () => { + expect(trigger.matches(buildCtx({ type: 'Issue' }))).toBe(false); + }); + + it('does not match when label name does not match readyToProcess', () => { + expect(trigger.matches(buildCtx({ labelName: 'some-other-label' }))).toBe(false); + }); + + it('does not match when label name is absent (early return on falsy labelName)', () => { + // The source code checks `if (!labelName) return false` before comparing labelId, + // so missing label.name always causes non-match + const ctx = buildCtx({ labelId: 'cascade-ready', labelName: 'cascade-ready' }); + const data = ctx.payload as Record; + (data.data as Record).label = { id: 'cascade-ready', name: undefined }; + expect(trigger.matches(ctx)).toBe(false); + }); + + 
it('does not match when readyToProcess label is not configured', () => { + mockResolveProjectPMConfig.mockReturnValue({ + labels: { processing: 'cascade-processing' }, // no readyToProcess + statuses: {}, + }); + expect(trigger.matches(buildCtx())).toBe(false); + }); + + it('does not match when data.label is missing', () => { + const ctx = buildCtx(); + const data = ctx.payload as Record; + (data.data as Record).label = undefined; + expect(trigger.matches(ctx)).toBe(false); + }); + + it('matches with a custom readyToProcess label name', () => { + mockResolveProjectPMConfig.mockReturnValue({ + labels: { readyToProcess: 'my-custom-ready-label' }, + statuses: {}, + }); + expect(trigger.matches(buildCtx({ labelName: 'my-custom-ready-label' }))).toBe(true); + }); + }); + + // ========================================================================= + // handle + // ========================================================================= + describe('handle', () => { + it('returns implementation agent when issue state maps to "todo"', async () => { + const result = await trigger.handle(buildCtx({ issueStateId: 'state-todo' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('implementation'); + expect(result?.workItemId).toBe('TEAM-123'); + expect(result?.workItemUrl).toBe('https://linear.app/org/issue/TEAM-123'); + expect(result?.agentInput.workItemId).toBe('TEAM-123'); + expect(result?.agentInput.triggerEvent).toBe('pm:label-added'); + }); + + it('returns splitting agent when issue state maps to "splitting"', async () => { + const result = await trigger.handle(buildCtx({ issueStateId: 'state-splitting' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('splitting'); + }); + + it('returns planning agent when issue state maps to "planning"', async () => { + const result = await trigger.handle(buildCtx({ issueStateId: 'state-planning' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('planning'); + }); + + 
it('returns null when issue state does not map to any agent', async () => { + const result = await trigger.handle(buildCtx({ issueStateId: 'state-done' })); + expect(result).toBeNull(); + }); + + it('returns null when issue identifier is missing', async () => { + const ctx = buildCtx(); + const data = ctx.payload as Record; + (data.data as Record).issue = undefined; + const result = await trigger.handle(ctx); + expect(result).toBeNull(); + }); + + it('returns null when issue stateId is missing', async () => { + const ctx = buildCtx(); + const data = ctx.payload as Record; + (data.data as Record).issue = { + id: 'issue-uuid', + identifier: 'TEAM-123', + // no stateId + }; + const result = await trigger.handle(ctx); + expect(result).toBeNull(); + }); + + it('returns null when linear config has no statuses', async () => { + mockGetLinearConfig.mockReturnValue({ teamId: 'team-abc' }); // no statuses + const result = await trigger.handle(buildCtx()); + expect(result).toBeNull(); + }); + + it('returns null when linear config is missing', async () => { + mockGetLinearConfig.mockReturnValue(undefined); + const result = await trigger.handle(buildCtx()); + expect(result).toBeNull(); + }); + + it('returns null when trigger is disabled for the resolved agent', async () => { + vi.mocked(checkTriggerEnabled).mockResolvedValue(false); + + const result = await trigger.handle(buildCtx({ issueStateId: 'state-todo' })); + + expect(result).toBeNull(); + expect(checkTriggerEnabled).toHaveBeenCalledWith( + 'proj-linear', + 'implementation', + 'pm:label-added', + 'linear-ready-to-process-label-added', + ); + }); + + it('calls checkTriggerEnabled with correct args for splitting', async () => { + vi.mocked(checkTriggerEnabled).mockResolvedValue(true); + + await trigger.handle(buildCtx({ issueStateId: 'state-splitting' })); + + expect(checkTriggerEnabled).toHaveBeenCalledWith( + 'proj-linear', + 'splitting', + 'pm:label-added', + 'linear-ready-to-process-label-added', + ); + }); + + 
it('includes linearIssueId in agentInput', async () => { + const result = await trigger.handle( + buildCtx({ issueStateId: 'state-todo', issueId: 'issue-uuid-xyz' }), + ); + + expect(result?.agentInput.linearIssueId).toBe('issue-uuid-xyz'); + }); + + it('falls back to issue.id when identifier is missing', async () => { + const ctx = buildCtx({ issueStateId: 'state-todo' }); + const data = ctx.payload as Record; + (data.data as Record).issue = { + id: 'fallback-id', + // no identifier + stateId: 'state-todo', + url: 'https://linear.app/org/issue/fallback', + }; + const result = await trigger.handle(ctx); + expect(result?.workItemId).toBe('fallback-id'); + }); + + it('workItemTitle is undefined (not included in IssueLabel payload)', async () => { + const result = await trigger.handle(buildCtx({ issueStateId: 'state-todo' })); + expect(result?.workItemTitle).toBeUndefined(); + }); + }); +}); diff --git a/tests/unit/triggers/linear-status-changed.test.ts b/tests/unit/triggers/linear-status-changed.test.ts new file mode 100644 index 00000000..7a4c99ef --- /dev/null +++ b/tests/unit/triggers/linear-status-changed.test.ts @@ -0,0 +1,256 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { mockLogger, mockTriggerCheckModule } from '../../helpers/sharedMocks.js'; + +vi.mock('../../../src/utils/logging.js', () => ({ logger: mockLogger })); +vi.mock('../../../src/triggers/shared/trigger-check.js', () => mockTriggerCheckModule); + +const mockGetLinearConfig = vi.fn(); +vi.mock('../../../src/pm/config.js', () => ({ + getLinearConfig: (...args: unknown[]) => mockGetLinearConfig(...args), +})); + +import { LinearStatusChangedTrigger } from '../../../src/triggers/linear/status-changed.js'; +import { checkTriggerEnabled } from '../../../src/triggers/shared/trigger-check.js'; +import type { TriggerContext } from '../../../src/types/index.js'; + +// --------------------------------------------------------------------------- +// Helpers +// 
--------------------------------------------------------------------------- + +const baseLinearConfig = { + teamId: 'team-abc', + statuses: { + splitting: 'state-splitting', + planning: 'state-planning', + todo: 'state-todo', + backlog: 'state-backlog', + done: 'state-done', + }, +}; + +const mockProject = { + id: 'proj-linear', + orgId: 'org-1', + name: 'Linear Project', + repo: 'owner/repo', + baseBranch: 'main', + branchPrefix: 'feature/', + pm: { type: 'linear' as const }, + linear: baseLinearConfig, +} as TriggerContext['project']; + +function buildCtx( + overrides: { + source?: TriggerContext['source']; + action?: string; + type?: string; + newStateId?: string; + previousStateId?: string; + issueIdentifier?: string; + issueId?: string; + issueTitle?: string; + issueUrl?: string; + noUpdatedFrom?: boolean; + noLinearConfig?: boolean; + } = {}, +): TriggerContext { + const project = overrides.noLinearConfig ? { ...mockProject, linear: undefined } : mockProject; + + return { + project: project as TriggerContext['project'], + source: overrides.source ?? 'linear', + payload: { + action: overrides.action ?? 'update', + type: overrides.type ?? 'Issue', + organizationId: 'org-123', + webhookTimestamp: Date.now(), + data: { + id: overrides.issueId ?? 'issue-uuid', + identifier: overrides.issueIdentifier ?? 'TEAM-123', + title: overrides.issueTitle ?? 'Fix the bug', + url: overrides.issueUrl ?? 'https://linear.app/org/issue/TEAM-123', + stateId: overrides.newStateId ?? 'state-todo', + teamId: 'team-abc', + }, + ...(overrides.noUpdatedFrom + ? {} + : { + updatedFrom: { + stateId: overrides.previousStateId ?? 
'state-backlog', + }, + }), + url: 'https://linear.app/org/issue/TEAM-123', + }, + }; +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('LinearStatusChangedTrigger', () => { + let trigger: LinearStatusChangedTrigger; + + beforeEach(() => { + vi.resetAllMocks(); + vi.mocked(checkTriggerEnabled).mockResolvedValue(true); + mockGetLinearConfig.mockReturnValue(baseLinearConfig); + trigger = new LinearStatusChangedTrigger(); + }); + + // ========================================================================= + // matches + // ========================================================================= + describe('matches', () => { + it('matches update/Issue events with stateId change in updatedFrom', () => { + expect(trigger.matches(buildCtx())).toBe(true); + }); + + it('does not match non-linear source', () => { + expect(trigger.matches(buildCtx({ source: 'jira' }))).toBe(false); + }); + + it('does not match non-update actions', () => { + expect(trigger.matches(buildCtx({ action: 'create' }))).toBe(false); + }); + + it('does not match non-Issue types', () => { + expect(trigger.matches(buildCtx({ type: 'Comment' }))).toBe(false); + }); + + it('does not match when updatedFrom is missing', () => { + expect(trigger.matches(buildCtx({ noUpdatedFrom: true }))).toBe(false); + }); + + it('does not match when updatedFrom.stateId is not a string', () => { + const ctx = buildCtx(); + (ctx.payload as Record).updatedFrom = { stateId: 123 }; + expect(trigger.matches(ctx)).toBe(false); + }); + + it('does not match IssueLabel type', () => { + expect(trigger.matches(buildCtx({ type: 'IssueLabel' }))).toBe(false); + }); + }); + + // ========================================================================= + // handle + // ========================================================================= + describe('handle', () => { + it('returns implementation 
agent when new state maps to "todo"', async () => { + const result = await trigger.handle(buildCtx({ newStateId: 'state-todo' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('implementation'); + expect(result?.workItemId).toBe('TEAM-123'); + expect(result?.workItemTitle).toBe('Fix the bug'); + expect(result?.workItemUrl).toBe('https://linear.app/org/issue/TEAM-123'); + expect(result?.agentInput.workItemId).toBe('TEAM-123'); + expect(result?.agentInput.triggerEvent).toBe('pm:status-changed'); + }); + + it('returns splitting agent when new state maps to "splitting"', async () => { + const result = await trigger.handle(buildCtx({ newStateId: 'state-splitting' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('splitting'); + }); + + it('returns planning agent when new state maps to "planning"', async () => { + const result = await trigger.handle(buildCtx({ newStateId: 'state-planning' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('planning'); + }); + + it('returns backlog-manager agent when new state maps to "backlog"', async () => { + const result = await trigger.handle(buildCtx({ newStateId: 'state-backlog' })); + + expect(result).not.toBeNull(); + expect(result?.agentType).toBe('backlog-manager'); + }); + + it('returns null when new state does not map to any agent', async () => { + const result = await trigger.handle(buildCtx({ newStateId: 'state-done' })); + expect(result).toBeNull(); + }); + + it('returns null when newStateId is missing from data', async () => { + const ctx = buildCtx(); + (ctx.payload as Record).data = { + identifier: 'TEAM-1', + // no stateId + }; + const result = await trigger.handle(ctx); + expect(result).toBeNull(); + }); + + it('returns null when issueIdentifier is missing', async () => { + const ctx = buildCtx(); + (ctx.payload as Record).data = { + stateId: 'state-todo', + // no identifier or id + }; + const result = await trigger.handle(ctx); + 
expect(result).toBeNull(); + }); + + it('returns null when linear config is missing statuses', async () => { + mockGetLinearConfig.mockReturnValue({ teamId: 'team-abc' }); // no statuses + const result = await trigger.handle(buildCtx()); + expect(result).toBeNull(); + }); + + it('returns null when linear config is missing entirely', async () => { + mockGetLinearConfig.mockReturnValue(undefined); + const result = await trigger.handle(buildCtx()); + expect(result).toBeNull(); + }); + + it('returns null when trigger is disabled for the resolved agent', async () => { + vi.mocked(checkTriggerEnabled).mockResolvedValue(false); + + const result = await trigger.handle(buildCtx({ newStateId: 'state-todo' })); + + expect(result).toBeNull(); + expect(checkTriggerEnabled).toHaveBeenCalledWith( + 'proj-linear', + 'implementation', + 'pm:status-changed', + 'linear-status-changed', + ); + }); + + it('calls checkTriggerEnabled with correct args for splitting agent', async () => { + vi.mocked(checkTriggerEnabled).mockResolvedValue(true); + + await trigger.handle(buildCtx({ newStateId: 'state-splitting' })); + + expect(checkTriggerEnabled).toHaveBeenCalledWith( + 'proj-linear', + 'splitting', + 'pm:status-changed', + 'linear-status-changed', + ); + }); + + it('includes linearIssueId in agentInput', async () => { + const result = await trigger.handle( + buildCtx({ newStateId: 'state-todo', issueId: 'issue-uuid-123' }), + ); + + expect(result?.agentInput.linearIssueId).toBe('issue-uuid-123'); + }); + + it('falls back to id when identifier is missing', async () => { + const ctx = buildCtx({ newStateId: 'state-todo' }); + const data = ctx.payload as Record; + (data.data as Record).identifier = undefined; + (data.data as Record).id = 'fallback-id'; + + const result = await trigger.handle(ctx); + + expect(result?.workItemId).toBe('fallback-id'); + }); + }); +}); From bcc16e869b0ab708623f806fddde579550725a3d Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 23:04:04 +0200 
Subject: [PATCH 32/52] docs(linear): update documentation and prompt context for Linear PM integration (#1102) Co-authored-by: Cascade Bot --- CLAUDE.md | 55 ++++++++++-- docs/architecture/06-integration-layer.md | 21 +++-- src/integrations/README.md | 9 +- .../unit/agents/shared/promptContext.test.ts | 84 +++++++++++++++++++ 4 files changed, 152 insertions(+), 17 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index b4c68f24..b6cc7127 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -26,12 +26,12 @@ CASCADE runs as three services (no monolithic server mode): The extensible trigger system routes events to agents: ``` -Trello/JIRA/Sentry/GitHub Webhook → Router → Redis/BullMQ → Worker → TriggerRegistry → Agent → Code Changes → PR +Trello/JIRA/Linear/Sentry/GitHub Webhook → Router → Redis/BullMQ → Worker → TriggerRegistry → Agent → Code Changes → PR ``` - `src/router/` - Webhook receiver (enqueues jobs to Redis) - `src/webhook/` - Shared webhook handler factory, parsers, and logging -- `src/triggers/` - Event handlers (Trello/JIRA card moves, labels, GitHub PRs, Sentry alerts) +- `src/triggers/` - Event handlers (Trello/JIRA/Linear issue moves, labels, GitHub PRs, Sentry alerts) - `src/agents/` - AI agents (splitting, planning, implementation, review, debug, alerting, backlog-manager, resolve-conflicts) - `src/gadgets/` - Tools agents can use (PM/SCM/alerting operations, Tmux, Todo, file system) @@ -123,6 +123,7 @@ Lefthook runs pre-commit (lint, typecheck) and pre-push (unit tests, integration - `src/github/` - GitHub client, dual-persona model (personas.ts) - `src/trello/` - Trello API client - `src/jira/` - JIRA API client +- `src/linear/` - Linear API client - `src/sentry/` - Sentry API client and integration - `src/utils/` - Utilities (logging, repo cloning, lifecycle) - `web/` - Dashboard frontend (React 19, Vite, Tailwind v4, TanStack Router) @@ -140,7 +141,7 @@ provider-specific branching. 
| Category | Interface | Example providers | |----------|-----------|-------------------| -| `pm` | `PMIntegration` (extends `IntegrationModule`) | Trello, JIRA | +| `pm` | `PMIntegration` (extends `IntegrationModule`) | Trello, JIRA, Linear | | `scm` | `SCMIntegration` (extends `IntegrationModule`) | GitHub | | `alerting` | `AlertingIntegration` (extends `IntegrationModule`) | Sentry | @@ -178,7 +179,7 @@ for `hasIntegration()` to return `true`. ### Bootstrap -`src/integrations/bootstrap.ts` is the single registration point for all four built-in +`src/integrations/bootstrap.ts` is the single registration point for all five built-in integrations. It is safe to import from both the router and worker — it does not pull in the agent execution pipeline or template files. @@ -208,7 +209,7 @@ Optional (infrastructure): - `SENTRY_RELEASE` - Release identifier for source maps (e.g., git SHA) - `SENTRY_TRACES_SAMPLE_RATE` - Trace sampling rate 0.0-1.0 (default: 0.1) -**Project credentials** (`GITHUB_TOKEN_IMPLEMENTER`, `GITHUB_TOKEN_REVIEWER`, `TRELLO_API_KEY`, `TRELLO_TOKEN`, LLM API keys) are stored in the `project_credentials` table — project-scoped, encrypted at rest when `CREDENTIAL_MASTER_KEY` is set. All credentials (integration tokens and LLM keys) use the same `project_credentials` table keyed by `(projectId, envVarKey)`. There is no env var fallback — the database is the sole source of truth for project-scoped secrets. +**Project credentials** (`GITHUB_TOKEN_IMPLEMENTER`, `GITHUB_TOKEN_REVIEWER`, `TRELLO_API_KEY`, `TRELLO_TOKEN`, `LINEAR_API_KEY`, `LINEAR_WEBHOOK_SECRET`, LLM API keys) are stored in the `project_credentials` table — project-scoped, encrypted at rest when `CREDENTIAL_MASTER_KEY` is set. All credentials (integration tokens and LLM keys) use the same `project_credentials` table keyed by `(projectId, envVarKey)`. There is no env var fallback — the database is the sole source of truth for project-scoped secrets. 
## Database Configuration @@ -218,8 +219,8 @@ CASCADE stores all project configuration in PostgreSQL. The `config/projects.jso - `organizations` - Organization definitions (multi-tenant support) - `projects` - Per-project config (repo, base branch, budget, engine, and per-project overrides for model, iterations, timeouts, progress model/interval, `run_links_enabled`, `max_in_flight_items`) -- `project_integrations` - Integration configs per project with `category` (pm/scm), `provider` (trello/jira/github), `config` JSONB, and `triggers` JSONB. One PM + one SCM per project (enforced by unique constraint) -- `project_credentials` - Project-scoped credentials keyed by `(projectId, envVarKey)`. Stores all credential types (GitHub tokens, Trello keys, JIRA tokens, LLM API keys). Encrypted at rest when `CREDENTIAL_MASTER_KEY` is set +- `project_integrations` - Integration configs per project with `category` (pm/scm), `provider` (trello/jira/linear/github), `config` JSONB, and `triggers` JSONB. One PM + one SCM per project (enforced by unique constraint) +- `project_credentials` - Project-scoped credentials keyed by `(projectId, envVarKey)`. Stores all credential types (GitHub tokens, Trello keys, JIRA tokens, Linear API keys, LLM API keys). Encrypted at rest when `CREDENTIAL_MASTER_KEY` is set - `agent_configs` - Per-agent-type overrides (model, iterations, engine, `agent_engine_settings`, max_concurrency, `system_prompt`, `task_prompt`), project-scoped only (`project_id NOT NULL`) - `agent_definitions` - Agent YAML definitions (built-in and custom). 
Each row stores the full definition JSONB, keyed by `agent_type` - `agent_trigger_configs` - Configured trigger events per project/agent pair (replaces legacy `project_integrations.triggers`) @@ -380,7 +381,7 @@ cascade projects trigger-set --agent review --event scm:review-requ | Event | Providers | Description | |-------|-----------|-------------| -| `pm:status-changed` | Trello, JIRA | Trigger when card/issue moves to agent's target status | +| `pm:status-changed` | Trello, JIRA, Linear | Trigger when card/issue moves to agent's target status | | `pm:label-added` | All | Trigger when Ready to Process label is added | ```bash @@ -453,6 +454,44 @@ cascade projects update --agent-engine opencode The OpenCode engine is implemented in `src/backends/opencode/`. Configure with `cascade agents create --engine opencode` or via the Agent Configs tab in the dashboard. +## Linear Integration + +CASCADE supports Linear as a PM provider. When Linear issues change state or labels are applied, they are routed to the appropriate agents. + +- **Linear client**: `src/linear/` — API client and type definitions +- **PM integration**: `src/pm/linear/integration.ts` — implements `PMIntegration` +- **Triggers**: `src/triggers/linear/` — `status-changed.ts`, `label-added.ts`, `comment-mention.ts` +- **Webhook route**: `/linear/webhook` in `src/router/index.ts` + +### Credentials + +Store Linear credentials via the dashboard (Project Settings > Credentials tab) or CLI: + +```bash +cascade projects credentials-set --key LINEAR_API_KEY --value lin_api_... 
+cascade projects credentials-set --key LINEAR_WEBHOOK_SECRET --value # optional +``` + +### Configuration + +Configure a project to use Linear as its PM provider: + +```bash +cascade projects integration-set --category pm --provider linear \ + --config '{"teamId":"TEAM_ID","statuses":{"todo":"Todo","inProgress":"In Progress","inReview":"In Review","done":"Done"}}' +``` + +Linear uses **issue identifiers** (`TEAM-123` format) as work item IDs. Issues belong to **teams** (equivalent to Trello boards or JIRA projects). + +### Webhook Setup + +Register your CASCADE webhook URL with Linear in your team settings: +``` +https:///linear/webhook +``` + +If `LINEAR_WEBHOOK_SECRET` is configured, the router verifies the `Linear-Signature` header on incoming payloads. + ## Sentry / Alerting Integration CASCADE integrates with Sentry for alert-driven automation. When Sentry issues or metric alerts arrive, they are routed to the `alerting` agent. diff --git a/docs/architecture/06-integration-layer.md b/docs/architecture/06-integration-layer.md index 8cc8bf4d..c9939274 100644 --- a/docs/architecture/06-integration-layer.md +++ b/docs/architecture/06-integration-layer.md @@ -10,7 +10,7 @@ The base contract for all integrations: ```typescript interface IntegrationModule { - readonly type: string; // 'trello', 'jira', 'github', 'sentry' + readonly type: string; // 'trello', 'jira', 'linear', 'github', 'sentry' readonly category: IntegrationCategory; // 'pm' | 'scm' | 'alerting' withCredentials(projectId: string, fn: () => Promise): Promise; @@ -74,12 +74,13 @@ const integrationRegistry: IntegrationRegistry; // singleton `src/integrations/bootstrap.ts` -Single, idempotent registration point for all four built-in integrations. Safe to import from router, worker, and dashboard — it does not pull in the agent execution pipeline or template files. +Single, idempotent registration point for all five built-in integrations. 
Safe to import from router, worker, and dashboard — it does not pull in the agent execution pipeline or template files. ``` -TrelloIntegration → integrationRegistry + pmRegistry -JiraIntegration → integrationRegistry + pmRegistry -GitHubSCMIntegration → integrationRegistry +TrelloIntegration → integrationRegistry + pmRegistry +JiraIntegration → integrationRegistry + pmRegistry +LinearIntegration → integrationRegistry + pmRegistry +GitHubSCMIntegration → integrationRegistry SentryAlertingIntegration → integrationRegistry ``` @@ -93,6 +94,7 @@ Each provider declares its credential roles — the mapping from logical role na |----------|----------|---------------|----------------| | Trello | pm | `api_key` → `TRELLO_API_KEY`, `token` → `TRELLO_TOKEN` | `api_secret` | | JIRA | pm | `email` → `JIRA_EMAIL`, `api_token` → `JIRA_API_TOKEN` | `webhook_secret` | +| Linear | pm | `api_key` → `LINEAR_API_KEY` | `webhook_secret` → `LINEAR_WEBHOOK_SECRET` | | GitHub | scm | `implementer_token` → `GITHUB_TOKEN_IMPLEMENTER`, `reviewer_token` → `GITHUB_TOKEN_REVIEWER` | `webhook_secret` | | Sentry | alerting | `api_token` → `SENTRY_API_TOKEN` | `webhook_secret` | @@ -115,6 +117,15 @@ Each provider declares its credential roles — the mapping from logical role na - Status transitions via JIRA transition ID lookup - Issue key extraction via regex: `[A-Z][A-Z0-9]+-\d+` +### Linear (`src/pm/linear/`, `src/linear/`) + +- `LinearIntegration` implements `PMIntegration` +- `LinearPMProvider` implements `PMProvider` (issue CRUD, comments, labels, state transitions) +- `linearClient` — GraphQL/REST client with AsyncLocalStorage credential scoping +- Status transitions via Linear state ID lookup +- Issue identifier extraction via regex: `[A-Z][A-Z0-9]*-\d+` (e.g. 
`TEAM-123`) +- Work item URL format: `https://linear.app//issue/` + ### GitHub (`src/github/`) - `GitHubSCMIntegration` implements `SCMIntegration` diff --git a/src/integrations/README.md b/src/integrations/README.md index af38d22c..9a1b6d0c 100644 --- a/src/integrations/README.md +++ b/src/integrations/README.md @@ -14,7 +14,7 @@ branching in shared code. ``` IntegrationModule (base contract) -├── PMIntegration — project management (Trello, JIRA) +├── PMIntegration — project management (Trello, JIRA, Linear) ├── SCMIntegration — source control (GitHub) └── AlertingIntegration — monitoring/alerting (Sentry) ``` @@ -27,7 +27,7 @@ IntegrationModule (base contract) | `src/integrations/registry.ts` | `IntegrationRegistry` class + `integrationRegistry` singleton | | `src/integrations/scm.ts` | `SCMIntegration` interface (SCM-specific extension) | | `src/integrations/alerting.ts` | `AlertingIntegration` interface (alerting-specific extension) | -| `src/integrations/bootstrap.ts` | **One place** — registers all 4 built-in integrations | +| `src/integrations/bootstrap.ts` | **One place** — registers all 5 built-in integrations | | `src/integrations/index.ts` | Public barrel exports | | `src/pm/integration.ts` | `PMIntegration` interface (PM-specific extension) | | `src/pm/registry.ts` | `PMIntegrationRegistry` singleton (PM-specific; backward compat) | @@ -95,8 +95,8 @@ Implementation: `src/sentry/alerting-integration.ts` (`SentryAlertingIntegration ## Adding a new integration — step by step -The example below adds a hypothetical **Linear** PM integration. Adapt the names for your actual -provider and category. +The example below uses **Linear** as a PM integration (already implemented — see +`src/pm/linear/integration.ts`). Adapt the names for your actual provider and category. 
### Step 1 — Implement the interface @@ -458,5 +458,6 @@ Before submitting a new integration: |----------|----------|-------------|---------|---------| | `trello` | pm | `src/pm/trello/integration.ts` | `src/router/adapters/trello.ts` | `src/triggers/trello/` | | `jira` | pm | `src/pm/jira/integration.ts` | `src/router/adapters/jira.ts` | `src/triggers/jira/` | +| `linear` | pm | `src/pm/linear/integration.ts` | `src/router/adapters/linear.ts` | `src/triggers/linear/` | | `github` | scm | `src/github/scm-integration.ts` | `src/router/adapters/github.ts` | `src/triggers/github/` | | `sentry` | alerting | `src/sentry/alerting-integration.ts` | `src/router/adapters/sentry.ts` | `src/triggers/sentry/` | diff --git a/tests/unit/agents/shared/promptContext.test.ts b/tests/unit/agents/shared/promptContext.test.ts index d445094a..c7ce91c2 100644 --- a/tests/unit/agents/shared/promptContext.test.ts +++ b/tests/unit/agents/shared/promptContext.test.ts @@ -199,6 +199,90 @@ describe('buildPromptContext', () => { }); }); + describe('with Linear provider', () => { + beforeEach(() => { + const mockProvider = createMockPMProvider(); + mockProvider.type = 'linear' as never; + mockProvider.getWorkItemUrl = vi.fn((id: string) => `https://linear.app/myorg/issue/${id}`); + mockGetPMProvider.mockReturnValue(mockProvider); + }); + + it('sets workItemNoun to "issue" for Linear', () => { + const ctx = buildPromptContext('TEAM-123', makeProject() as never); + expect(ctx.workItemNoun).toBe('issue'); + }); + + it('sets workItemNounPlural to "issues" for Linear', () => { + const ctx = buildPromptContext('TEAM-123', makeProject() as never); + expect(ctx.workItemNounPlural).toBe('issues'); + }); + + it('sets workItemNounCap to "Issue" for Linear', () => { + const ctx = buildPromptContext('TEAM-123', makeProject() as never); + expect(ctx.workItemNounCap).toBe('Issue'); + }); + + it('sets workItemNounPluralCap to "Issues" for Linear', () => { + const ctx = buildPromptContext('TEAM-123', 
makeProject() as never); + expect(ctx.workItemNounPluralCap).toBe('Issues'); + }); + + it('sets pmName to "Linear"', () => { + const ctx = buildPromptContext('TEAM-123', makeProject() as never); + expect(ctx.pmName).toBe('Linear'); + }); + + it('sets pmType to "linear"', () => { + const ctx = buildPromptContext('TEAM-123', makeProject() as never); + expect(ctx.pmType).toBe('linear'); + }); + + it('generates workItemUrl from provider using Linear issue URL format', () => { + const ctx = buildPromptContext('TEAM-123', makeProject() as never); + expect(ctx.workItemUrl).toBe('https://linear.app/myorg/issue/TEAM-123'); + }); + + it('sets pipeline list IDs from Linear statuses', () => { + const linearProject = makeProject({ + trello: undefined, + pm: { type: 'linear' }, + linear: { + teamId: 'team-abc', + statuses: { + backlog: 'Backlog', + todo: 'Todo', + inProgress: 'In Progress', + inReview: 'In Review', + done: 'Done', + merged: 'Merged', + }, + }, + }); + const ctx = buildPromptContext('TEAM-1', linearProject as never); + expect(ctx.backlogListId).toBe('Backlog'); + expect(ctx.todoListId).toBe('Todo'); + expect(ctx.inProgressListId).toBe('In Progress'); + expect(ctx.inReviewListId).toBe('In Review'); + expect(ctx.mergedListId).toBe('Merged'); + }); + + it('leaves pipeline list IDs undefined when Linear statuses are missing', () => { + const linearProject = makeProject({ + trello: undefined, + pm: { type: 'linear' }, + linear: { + teamId: 'team-abc', + statuses: {}, + }, + }); + const ctx = buildPromptContext('TEAM-1', linearProject as never); + expect(ctx.backlogListId).toBeUndefined(); + expect(ctx.todoListId).toBeUndefined(); + expect(ctx.inProgressListId).toBeUndefined(); + expect(ctx.inReviewListId).toBeUndefined(); + }); + }); + describe('with prContext', () => { beforeEach(() => { const mockProvider = createMockPMProvider(); From 48eb254d211dc931f3b0a004b17e37c832269ba8 Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 23:34:23 +0200 Subject: [PATCH 
33/52] fix(linear): wire up lookupProject to loadProjectConfigByLinearTeamId (#1103) Co-authored-by: Cascade Bot --- src/pm/linear/integration.ts | 13 +++++++------ tests/unit/pm/linear/integration.test.ts | 20 +++++++++++++++++++- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/src/pm/linear/integration.ts b/src/pm/linear/integration.ts index c6985eae..5ca411ad 100644 --- a/src/pm/linear/integration.ts +++ b/src/pm/linear/integration.ts @@ -13,7 +13,11 @@ import { PROVIDER_CREDENTIAL_ROLES, registerCredentialRoles, } from '../../config/integrationRoles.js'; -import { getIntegrationCredential, getIntegrationCredentialOrNull } from '../../config/provider.js'; +import { + getIntegrationCredential, + getIntegrationCredentialOrNull, + loadProjectConfigByLinearTeamId, +} from '../../config/provider.js'; import { getIntegrationProvider } from '../../db/repositories/credentialsRepository.js'; import { withLinearCredentials } from '../../linear/client.js'; import type { CascadeConfig, ProjectConfig } from '../../types/index.js'; @@ -197,12 +201,9 @@ export class LinearIntegration implements PMIntegration { } async lookupProject( - _identifier: string, + identifier: string, ): Promise<{ project: ProjectConfig; config: CascadeConfig } | null> { - // Linear project lookup by teamId is not yet implemented in the config - // repository (separate story). Return null to fall through to other lookup - // mechanisms. - return null; + return (await loadProjectConfigByLinearTeamId(identifier)) ?? 
null; } extractWorkItemId(text: string): string | null { diff --git a/tests/unit/pm/linear/integration.test.ts b/tests/unit/pm/linear/integration.test.ts index 9b5df82d..9c640910 100644 --- a/tests/unit/pm/linear/integration.test.ts +++ b/tests/unit/pm/linear/integration.test.ts @@ -6,11 +6,14 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; const mockGetIntegrationCredential = vi.fn(); const mockGetIntegrationCredentialOrNull = vi.fn(); +const mockLoadProjectConfigByLinearTeamId = vi.fn(); vi.mock('../../../../src/config/provider.js', () => ({ getIntegrationCredential: (...args: unknown[]) => mockGetIntegrationCredential(...args), getIntegrationCredentialOrNull: (...args: unknown[]) => mockGetIntegrationCredentialOrNull(...args), + loadProjectConfigByLinearTeamId: (...args: unknown[]) => + mockLoadProjectConfigByLinearTeamId(...args), })); const mockGetIntegrationProvider = vi.fn(); @@ -366,8 +369,23 @@ describe('LinearIntegration', () => { // lookupProject // ========================================================================= describe('lookupProject', () => { - it('returns null (not yet implemented)', async () => { + it('returns the project+config when a matching Linear teamId is found', async () => { + const project = makeProject(); + const config = { version: 1, agents: [] }; + mockLoadProjectConfigByLinearTeamId.mockResolvedValueOnce({ project, config }); + const result = await integration.lookupProject('team-abc'); + + expect(mockLoadProjectConfigByLinearTeamId).toHaveBeenCalledWith('team-abc'); + expect(result).toEqual({ project, config }); + }); + + it('returns null when no project matches the given teamId', async () => { + mockLoadProjectConfigByLinearTeamId.mockResolvedValueOnce(undefined); + + const result = await integration.lookupProject('unknown-team'); + + expect(mockLoadProjectConfigByLinearTeamId).toHaveBeenCalledWith('unknown-team'); expect(result).toBeNull(); }); }); From e431f57b6eda02fdd0f9f2cae05e14afd64926f4 Mon Sep 17 
00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 23:44:04 +0200 Subject: [PATCH 34/52] feat(linear): add getTeams, getTeamWorkflowStates, getTeamLabels discovery methods (#1104) Co-authored-by: Cascade Bot --- src/linear/client.ts | 70 ++++++ tests/unit/pm/linear/client.test.ts | 324 ++++++++++++++++++++++++++++ 2 files changed, 394 insertions(+) create mode 100644 tests/unit/pm/linear/client.test.ts diff --git a/src/linear/client.ts b/src/linear/client.ts index e4a3a429..36469d47 100644 --- a/src/linear/client.ts +++ b/src/linear/client.ts @@ -18,8 +18,10 @@ import type { LinearIssue, LinearLabel, LinearReaction, + LinearTeam, LinearUpdateIssueInput, LinearUser, + LinearWorkflowState, } from './types.js'; const LINEAR_API_URL = 'https://api.linear.app/graphql'; @@ -580,6 +582,74 @@ export const linearClient = { }; }, + // ===== Discovery ===== + + async getTeams(): Promise { + logger.debug('Fetching Linear teams'); + const data = await linearGraphQL<{ teams: { nodes: unknown[] } }>( + `query GetTeams { + teams { + nodes { + ${TEAM_FIELDS} + } + } + }`, + ); + return (data.teams.nodes as RawIssue['team'][]).map(mapTeam); + }, + + async getTeamWorkflowStates(teamId: string): Promise { + logger.debug('Fetching Linear team workflow states', { teamId }); + const data = await linearGraphQL<{ + team: { states: { nodes: unknown[] } }; + }>( + `query GetTeamWorkflowStates($id: String!) { + team(id: $id) { + states { + nodes { + ${STATE_FIELDS} + } + } + } + }`, + { id: teamId }, + ); + return ( + data.team.states.nodes as Array<{ + id?: string; + name?: string; + type?: string; + color?: string; + }> + ).map(mapState); + }, + + async getTeamLabels(teamId: string): Promise { + logger.debug('Fetching Linear team labels', { teamId }); + const data = await linearGraphQL<{ + team: { labels: { nodes: unknown[] } }; + }>( + `query GetTeamLabels($id: String!) 
{ + team(id: $id) { + labels { + nodes { + ${LABEL_FIELDS} + } + } + } + }`, + { id: teamId }, + ); + return ( + data.team.labels.nodes as Array<{ + id?: string; + name?: string; + color?: string; + description?: string | null; + }> + ).map(mapLabel); + }, + // ===== User ===== async getMe(): Promise { diff --git a/tests/unit/pm/linear/client.test.ts b/tests/unit/pm/linear/client.test.ts new file mode 100644 index 00000000..e481c911 --- /dev/null +++ b/tests/unit/pm/linear/client.test.ts @@ -0,0 +1,324 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; + +// --------------------------------------------------------------------------- +// Mock fetch globally +// --------------------------------------------------------------------------- + +const mockFetch = vi.fn(); +vi.stubGlobal('fetch', mockFetch); + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function makeGraphQLResponse(data: unknown) { + return { + ok: true, + json: vi.fn().mockResolvedValue({ data }), + }; +} + +function makeGraphQLErrorResponse(message: string) { + return { + ok: true, + json: vi.fn().mockResolvedValue({ errors: [{ message }] }), + }; +} + +function makeHttpErrorResponse(status: number) { + return { + ok: false, + status, + json: vi.fn().mockResolvedValue({}), + }; +} + +// --------------------------------------------------------------------------- +// Import the client under test +// --------------------------------------------------------------------------- + +import { linearClient, withLinearCredentials } from '../../../../src/linear/client.js'; + +const TEST_CREDS = { apiKey: 'test-api-key' }; + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +describe('linearClient discovery methods', () => { + beforeEach(() => { + 
vi.clearAllMocks(); + }); + + // ========================================================================= + // getTeams + // ========================================================================= + describe('getTeams', () => { + it('returns an array of LinearTeam objects', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + teams: { + nodes: [ + { id: 'team-1', name: 'Engineering', key: 'ENG', description: 'Main team' }, + { id: 'team-2', name: 'Design', key: 'DES', description: null }, + ], + }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => linearClient.getTeams()); + + expect(result).toHaveLength(2); + expect(result[0]).toEqual({ + id: 'team-1', + name: 'Engineering', + key: 'ENG', + description: 'Main team', + }); + expect(result[1]).toEqual({ + id: 'team-2', + name: 'Design', + key: 'DES', + description: null, + }); + }); + + it('returns an empty array when no teams are available', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + teams: { nodes: [] }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => linearClient.getTeams()); + + expect(result).toEqual([]); + }); + + it('uses defaults for missing fields in team nodes', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + teams: { + nodes: [{}], + }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => linearClient.getTeams()); + + expect(result[0]).toEqual({ id: '', name: '', key: '', description: null }); + }); + + it('throws on GraphQL errors', async () => { + mockFetch.mockResolvedValue(makeGraphQLErrorResponse('Unauthorized')); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.getTeams()), + ).rejects.toThrow('Linear API error: Unauthorized'); + }); + + it('throws on HTTP errors', async () => { + mockFetch.mockResolvedValue(makeHttpErrorResponse(401)); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.getTeams()), + 
).rejects.toThrow('Linear API HTTP error 401'); + }); + + it('sends the correct Authorization header', async () => { + mockFetch.mockResolvedValue(makeGraphQLResponse({ teams: { nodes: [] } })); + + await withLinearCredentials(TEST_CREDS, () => linearClient.getTeams()); + + expect(mockFetch).toHaveBeenCalledWith( + 'https://api.linear.app/graphql', + expect.objectContaining({ + headers: expect.objectContaining({ + Authorization: 'Bearer test-api-key', + }), + }), + ); + }); + }); + + // ========================================================================= + // getTeamWorkflowStates + // ========================================================================= + describe('getTeamWorkflowStates', () => { + it('returns an array of LinearWorkflowState objects for the given team', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + team: { + states: { + nodes: [ + { id: 'state-1', name: 'Backlog', type: 'backlog', color: '#aaa' }, + { id: 'state-2', name: 'In Progress', type: 'started', color: '#00f' }, + { id: 'state-3', name: 'Done', type: 'completed', color: '#0f0' }, + ], + }, + }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamWorkflowStates('team-1'), + ); + + expect(result).toHaveLength(3); + expect(result[0]).toEqual({ id: 'state-1', name: 'Backlog', type: 'backlog', color: '#aaa' }); + expect(result[1]).toEqual({ + id: 'state-2', + name: 'In Progress', + type: 'started', + color: '#00f', + }); + expect(result[2]).toEqual({ id: 'state-3', name: 'Done', type: 'completed', color: '#0f0' }); + }); + + it('returns an empty array when team has no workflow states', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + team: { states: { nodes: [] } }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamWorkflowStates('team-1'), + ); + + expect(result).toEqual([]); + }); + + it('uses defaults for missing fields in state nodes', async 
() => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + team: { states: { nodes: [{}] } }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamWorkflowStates('team-1'), + ); + + expect(result[0]).toEqual({ id: '', name: '', type: '', color: '' }); + }); + + it('passes the teamId variable in the GraphQL request', async () => { + mockFetch.mockResolvedValue(makeGraphQLResponse({ team: { states: { nodes: [] } } })); + + await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamWorkflowStates('my-team-id'), + ); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body as string); + expect(body.variables).toEqual({ id: 'my-team-id' }); + }); + + it('throws on GraphQL errors', async () => { + mockFetch.mockResolvedValue(makeGraphQLErrorResponse('Team not found')); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.getTeamWorkflowStates('bad-id')), + ).rejects.toThrow('Linear API error: Team not found'); + }); + + it('throws on HTTP errors', async () => { + mockFetch.mockResolvedValue(makeHttpErrorResponse(500)); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.getTeamWorkflowStates('team-1')), + ).rejects.toThrow('Linear API HTTP error 500'); + }); + }); + + // ========================================================================= + // getTeamLabels + // ========================================================================= + describe('getTeamLabels', () => { + it('returns an array of LinearLabel objects for the given team', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + team: { + labels: { + nodes: [ + { id: 'label-1', name: 'Bug', color: '#f00', description: 'A bug' }, + { id: 'label-2', name: 'Feature', color: '#0f0', description: null }, + ], + }, + }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamLabels('team-1'), + ); + + expect(result).toHaveLength(2); + 
expect(result[0]).toEqual({ + id: 'label-1', + name: 'Bug', + color: '#f00', + description: 'A bug', + }); + expect(result[1]).toEqual({ + id: 'label-2', + name: 'Feature', + color: '#0f0', + description: null, + }); + }); + + it('returns an empty array when team has no labels', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + team: { labels: { nodes: [] } }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamLabels('team-1'), + ); + + expect(result).toEqual([]); + }); + + it('uses defaults for missing fields in label nodes', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + team: { labels: { nodes: [{}] } }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.getTeamLabels('team-1'), + ); + + expect(result[0]).toEqual({ id: '', name: '', color: '', description: null }); + }); + + it('passes the teamId variable in the GraphQL request', async () => { + mockFetch.mockResolvedValue(makeGraphQLResponse({ team: { labels: { nodes: [] } } })); + + await withLinearCredentials(TEST_CREDS, () => linearClient.getTeamLabels('my-team-id')); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body as string); + expect(body.variables).toEqual({ id: 'my-team-id' }); + }); + + it('throws on GraphQL errors', async () => { + mockFetch.mockResolvedValue(makeGraphQLErrorResponse('Permission denied')); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.getTeamLabels('bad-id')), + ).rejects.toThrow('Linear API error: Permission denied'); + }); + + it('throws on HTTP errors', async () => { + mockFetch.mockResolvedValue(makeHttpErrorResponse(403)); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.getTeamLabels('team-1')), + ).rejects.toThrow('Linear API HTTP error 403'); + }); + }); +}); From acfa0974d956df79fc724d7d375ac846dc9eb996 Mon Sep 17 00:00:00 2001 From: aaight Date: Tue, 14 Apr 2026 23:55:09 +0200 
Subject: [PATCH 35/52] feat(api): add Linear tRPC discovery endpoints (#1105) Co-authored-by: Cascade Bot --- src/api/routers/integrationsDiscovery.ts | 117 ++++++++ .../api/routers/integrationsDiscovery.test.ts | 270 ++++++++++++++++++ 2 files changed, 387 insertions(+) diff --git a/src/api/routers/integrationsDiscovery.ts b/src/api/routers/integrationsDiscovery.ts index cdd3bdfb..e2a68129 100644 --- a/src/api/routers/integrationsDiscovery.ts +++ b/src/api/routers/integrationsDiscovery.ts @@ -4,6 +4,7 @@ import { z } from 'zod'; import { getIntegrationCredentialOrNull } from '../../config/provider.js'; import { getIntegrationByProjectAndCategory } from '../../db/repositories/integrationsRepository.js'; import { jiraClient, withJiraCredentials } from '../../jira/client.js'; +import { linearClient, withLinearCredentials } from '../../linear/client.js'; import { trelloClient, withTrelloCredentials } from '../../trello/client.js'; import { logger } from '../../utils/logging.js'; import { protectedProcedure, router } from '../trpc.js'; @@ -27,6 +28,10 @@ const jiraCredsInput = z.object({ baseUrl: z.string().url(), }); +const linearCredsInput = z.object({ + apiKey: z.string().min(1), +}); + async function withTrelloCreds( input: z.infer, label: string, @@ -45,6 +50,14 @@ async function withJiraCreds( ); } +async function withLinearCreds( + input: z.infer, + label: string, + fn: (creds: { apiKey: string }) => Promise, +): Promise { + return wrapIntegrationCall(label, () => fn({ apiKey: input.apiKey })); +} + export const integrationsDiscoveryRouter = router({ verifyTrello: protectedProcedure.input(trelloCredsInput).mutation(async ({ ctx, input }) => { logger.debug('integrationsDiscovery.verifyTrello called', { orgId: ctx.effectiveOrgId }); @@ -429,4 +442,108 @@ export const integrationsDiscoveryRouter = router({ }; }); }), + + /** + * Verify a raw Linear API key. + * Accepts a plaintext API key from the form and calls getMe() to verify it. 
+ * Returns the authenticated user's id, name, and displayName. + */ + verifyLinear: protectedProcedure.input(linearCredsInput).mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.verifyLinear called', { orgId: ctx.effectiveOrgId }); + return withLinearCreds(input, 'Failed to verify Linear credentials', (creds) => + withLinearCredentials(creds, () => + linearClient.getMe().then((me) => ({ + id: me.id, + name: me.name, + displayName: me.displayName, + })), + ), + ); + }), + + /** + * Fetch Linear teams using raw API key credentials. + * Returns all teams accessible by the provided API key. + */ + linearTeams: protectedProcedure.input(linearCredsInput).mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.linearTeams called', { orgId: ctx.effectiveOrgId }); + return withLinearCreds(input, 'Failed to fetch Linear teams', (creds) => + withLinearCredentials(creds, () => linearClient.getTeams()), + ); + }), + + /** + * Fetch Linear teams using stored project credentials. + * Resolves the API key from the project's stored credentials and returns all teams. + */ + linearTeamsByProject: protectedProcedure + .input(z.object({ projectId: z.string() })) + .mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.linearTeamsByProject called', { + orgId: ctx.effectiveOrgId, + projectId: input.projectId, + }); + await verifyProjectOrgAccess(input.projectId, ctx.effectiveOrgId); + const apiKey = await getIntegrationCredentialOrNull(input.projectId, 'pm', 'api_key'); + if (!apiKey) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: 'Linear credentials not configured', + }); + } + return wrapIntegrationCall('Failed to fetch Linear teams', () => + withLinearCredentials({ apiKey }, () => linearClient.getTeams()), + ); + }), + + /** + * Fetch Linear team workflow states and labels using raw API key credentials. + * Returns both states and labels for the given teamId. 
+ */ + linearTeamDetails: protectedProcedure + .input(linearCredsInput.extend({ teamId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.linearTeamDetails called', { + orgId: ctx.effectiveOrgId, + teamId: input.teamId, + }); + return withLinearCreds(input, 'Failed to fetch Linear team details', (creds) => + withLinearCredentials(creds, () => + Promise.all([ + linearClient.getTeamWorkflowStates(input.teamId), + linearClient.getTeamLabels(input.teamId), + ]).then(([states, labels]) => ({ states, labels })), + ), + ); + }), + + /** + * Fetch Linear team workflow states and labels using stored project credentials. + * Resolves the API key from stored credentials and returns states and labels for the team. + */ + linearTeamDetailsByProject: protectedProcedure + .input(z.object({ projectId: z.string(), teamId: z.string().min(1) })) + .mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.linearTeamDetailsByProject called', { + orgId: ctx.effectiveOrgId, + projectId: input.projectId, + teamId: input.teamId, + }); + await verifyProjectOrgAccess(input.projectId, ctx.effectiveOrgId); + const apiKey = await getIntegrationCredentialOrNull(input.projectId, 'pm', 'api_key'); + if (!apiKey) { + throw new TRPCError({ + code: 'NOT_FOUND', + message: 'Linear credentials not configured', + }); + } + return wrapIntegrationCall('Failed to fetch Linear team details', () => + withLinearCredentials({ apiKey }, () => + Promise.all([ + linearClient.getTeamWorkflowStates(input.teamId), + linearClient.getTeamLabels(input.teamId), + ]).then(([states, labels]) => ({ states, labels })), + ), + ); + }), }); diff --git a/tests/unit/api/routers/integrationsDiscovery.test.ts b/tests/unit/api/routers/integrationsDiscovery.test.ts index 6eab94b8..83f82479 100644 --- a/tests/unit/api/routers/integrationsDiscovery.test.ts +++ b/tests/unit/api/routers/integrationsDiscovery.test.ts @@ -15,6 +15,10 @@ const { 
mockJiraGetIssueTypesForProject, mockJiraGetFields, mockJiraCreateCustomField, + mockLinearGetMe, + mockLinearGetTeams, + mockLinearGetTeamWorkflowStates, + mockLinearGetTeamLabels, mockGetAuthenticated, mockVerifyProjectOrgAccess, mockGetIntegrationCredentialOrNull, @@ -33,6 +37,10 @@ const { mockJiraGetIssueTypesForProject: vi.fn(), mockJiraGetFields: vi.fn(), mockJiraCreateCustomField: vi.fn(), + mockLinearGetMe: vi.fn(), + mockLinearGetTeams: vi.fn(), + mockLinearGetTeamWorkflowStates: vi.fn(), + mockLinearGetTeamLabels: vi.fn(), mockGetAuthenticated: vi.fn(), mockVerifyProjectOrgAccess: vi.fn(), mockGetIntegrationCredentialOrNull: vi.fn(), @@ -70,6 +78,19 @@ vi.mock('../../../../src/jira/client.js', () => ({ }, })); +vi.mock('../../../../src/linear/client.js', () => ({ + withLinearCredentials: (...args: unknown[]) => { + const cb = args[1] as () => unknown; + return cb(); + }, + linearClient: { + getMe: mockLinearGetMe, + getTeams: mockLinearGetTeams, + getTeamWorkflowStates: mockLinearGetTeamWorkflowStates, + getTeamLabels: mockLinearGetTeamLabels, + }, +})); + vi.mock('../../../../src/utils/logging.js', () => ({ logger: { debug: vi.fn(), info: vi.fn(), warn: vi.fn(), error: vi.fn() }, })); @@ -182,6 +203,37 @@ describe('integrationsDiscoveryRouter', () => { 'UNAUTHORIZED', ); }); + + it('verifyLinear throws UNAUTHORIZED when not authenticated', async () => { + const caller = createCaller({ user: null, effectiveOrgId: null }); + await expectTRPCError(caller.verifyLinear({ apiKey: 'lin_api_test' }), 'UNAUTHORIZED'); + }); + + it('linearTeams throws UNAUTHORIZED when not authenticated', async () => { + const caller = createCaller({ user: null, effectiveOrgId: null }); + await expectTRPCError(caller.linearTeams({ apiKey: 'lin_api_test' }), 'UNAUTHORIZED'); + }); + + it('linearTeamsByProject throws UNAUTHORIZED when not authenticated', async () => { + const caller = createCaller({ user: null, effectiveOrgId: null }); + await 
expectTRPCError(caller.linearTeamsByProject({ projectId: 'proj-1' }), 'UNAUTHORIZED'); + }); + + it('linearTeamDetails throws UNAUTHORIZED when not authenticated', async () => { + const caller = createCaller({ user: null, effectiveOrgId: null }); + await expectTRPCError( + caller.linearTeamDetails({ apiKey: 'lin_api_test', teamId: 'team-1' }), + 'UNAUTHORIZED', + ); + }); + + it('linearTeamDetailsByProject throws UNAUTHORIZED when not authenticated', async () => { + const caller = createCaller({ user: null, effectiveOrgId: null }); + await expectTRPCError( + caller.linearTeamDetailsByProject({ projectId: 'proj-1', teamId: 'team-1' }), + 'UNAUTHORIZED', + ); + }); }); // ── verifyTrello ───────────────────────────────────────────────────── @@ -960,6 +1012,224 @@ describe('integrationsDiscoveryRouter', () => { }); }); + // ── verifyLinear ───────────────────────────────────────────────────── + + describe('verifyLinear', () => { + const linearCredsInput = { apiKey: 'lin_api_test' }; + + it('returns id, name, and displayName on success', async () => { + mockLinearGetMe.mockResolvedValue({ + id: 'linear-user-123', + name: 'Linear User', + displayName: 'linearuser', + email: 'linear@example.com', + avatarUrl: null, + active: true, + }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.verifyLinear(linearCredsInput); + + expect(result).toEqual({ + id: 'linear-user-123', + name: 'Linear User', + displayName: 'linearuser', + }); + }); + + it('wraps API failure in BAD_REQUEST', async () => { + mockLinearGetMe.mockRejectedValue(new Error('Invalid API key')); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect(caller.verifyLinear(linearCredsInput)).rejects.toMatchObject({ + code: 'BAD_REQUEST', + }); + }); + + it('rejects empty apiKey', async () => { + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await 
expect(caller.verifyLinear({ apiKey: '' })).rejects.toThrow(); + }); + }); + + // ── linearTeams ─────────────────────────────────────────────────────── + + describe('linearTeams', () => { + const linearCredsInput = { apiKey: 'lin_api_test' }; + + it('returns teams list on success', async () => { + const teams = [ + { id: 'team-1', name: 'Engineering', key: 'ENG', description: null }, + { id: 'team-2', name: 'Design', key: 'DES', description: 'Design team' }, + ]; + mockLinearGetTeams.mockResolvedValue(teams); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.linearTeams(linearCredsInput); + + expect(result).toEqual(teams); + }); + + it('wraps API failure in BAD_REQUEST', async () => { + mockLinearGetTeams.mockRejectedValue(new Error('Network error')); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect(caller.linearTeams(linearCredsInput)).rejects.toMatchObject({ + code: 'BAD_REQUEST', + }); + }); + }); + + // ── linearTeamsByProject ────────────────────────────────────────────── + + describe('linearTeamsByProject', () => { + it('returns teams using stored project credentials', async () => { + mockGetIntegrationCredentialOrNull.mockResolvedValueOnce('stored-api-key'); + const teams = [{ id: 'team-1', name: 'Engineering', key: 'ENG', description: null }]; + mockLinearGetTeams.mockResolvedValue(teams); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.linearTeamsByProject({ projectId: 'proj-1' }); + + expect(mockVerifyProjectOrgAccess).toHaveBeenCalledWith('proj-1', mockUser.orgId); + expect(result).toEqual(teams); + }); + + it('throws NOT_FOUND when apiKey credential is missing', async () => { + mockGetIntegrationCredentialOrNull.mockResolvedValue(null); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect(caller.linearTeamsByProject({ 
projectId: 'proj-1' })).rejects.toMatchObject({ + code: 'NOT_FOUND', + }); + }); + + it('propagates org access denial', async () => { + const { TRPCError } = await import('@trpc/server'); + mockVerifyProjectOrgAccess.mockRejectedValue( + new TRPCError({ code: 'FORBIDDEN', message: 'Access denied' }), + ); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect( + caller.linearTeamsByProject({ projectId: 'other-org-proj' }), + ).rejects.toMatchObject({ + code: 'FORBIDDEN', + }); + }); + + it('wraps Linear API failure in BAD_REQUEST', async () => { + mockGetIntegrationCredentialOrNull.mockResolvedValueOnce('stored-api-key'); + mockLinearGetTeams.mockRejectedValue(new Error('API error')); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect(caller.linearTeamsByProject({ projectId: 'proj-1' })).rejects.toMatchObject({ + code: 'BAD_REQUEST', + }); + }); + }); + + // ── linearTeamDetails ───────────────────────────────────────────────── + + describe('linearTeamDetails', () => { + const linearCredsInput = { apiKey: 'lin_api_test' }; + + it('returns states and labels on success', async () => { + const states = [ + { id: 'state-1', name: 'Todo', type: 'unstarted', color: '#aaa' }, + { id: 'state-2', name: 'In Progress', type: 'started', color: '#bbb' }, + ]; + const labels = [ + { id: 'label-1', name: 'Bug', color: '#f00', description: null }, + { id: 'label-2', name: 'Feature', color: '#0f0', description: 'New feature' }, + ]; + mockLinearGetTeamWorkflowStates.mockResolvedValue(states); + mockLinearGetTeamLabels.mockResolvedValue(labels); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.linearTeamDetails({ ...linearCredsInput, teamId: 'team-1' }); + + expect(result).toEqual({ states, labels }); + expect(mockLinearGetTeamWorkflowStates).toHaveBeenCalledWith('team-1'); + 
expect(mockLinearGetTeamLabels).toHaveBeenCalledWith('team-1'); + }); + + it('wraps API failure in BAD_REQUEST', async () => { + mockLinearGetTeamWorkflowStates.mockRejectedValue(new Error('Team not found')); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect( + caller.linearTeamDetails({ ...linearCredsInput, teamId: 'team-1' }), + ).rejects.toMatchObject({ code: 'BAD_REQUEST' }); + }); + + it('rejects empty teamId', async () => { + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect(caller.linearTeamDetails({ ...linearCredsInput, teamId: '' })).rejects.toThrow(); + }); + }); + + // ── linearTeamDetailsByProject ──────────────────────────────────────── + + describe('linearTeamDetailsByProject', () => { + it('returns team details using stored project credentials', async () => { + mockGetIntegrationCredentialOrNull.mockResolvedValueOnce('stored-api-key'); + const states = [{ id: 'state-1', name: 'Todo', type: 'unstarted', color: '#aaa' }]; + const labels = [{ id: 'label-1', name: 'Bug', color: '#f00', description: null }]; + mockLinearGetTeamWorkflowStates.mockResolvedValue(states); + mockLinearGetTeamLabels.mockResolvedValue(labels); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.linearTeamDetailsByProject({ + projectId: 'proj-1', + teamId: 'team-1', + }); + + expect(mockVerifyProjectOrgAccess).toHaveBeenCalledWith('proj-1', mockUser.orgId); + expect(result).toEqual({ states, labels }); + expect(mockLinearGetTeamWorkflowStates).toHaveBeenCalledWith('team-1'); + expect(mockLinearGetTeamLabels).toHaveBeenCalledWith('team-1'); + }); + + it('throws NOT_FOUND when apiKey credential is missing', async () => { + mockGetIntegrationCredentialOrNull.mockResolvedValue(null); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect( + caller.linearTeamDetailsByProject({ 
projectId: 'proj-1', teamId: 'team-1' }), + ).rejects.toMatchObject({ code: 'NOT_FOUND' }); + }); + + it('propagates org access denial', async () => { + const { TRPCError } = await import('@trpc/server'); + mockVerifyProjectOrgAccess.mockRejectedValue( + new TRPCError({ code: 'FORBIDDEN', message: 'Access denied' }), + ); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect( + caller.linearTeamDetailsByProject({ projectId: 'other-org-proj', teamId: 'team-1' }), + ).rejects.toMatchObject({ code: 'FORBIDDEN' }); + }); + + it('rejects empty teamId', async () => { + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect( + caller.linearTeamDetailsByProject({ projectId: 'proj-1', teamId: '' }), + ).rejects.toThrow(); + }); + + it('wraps Linear API failure in BAD_REQUEST', async () => { + mockGetIntegrationCredentialOrNull.mockResolvedValueOnce('stored-api-key'); + mockLinearGetTeamWorkflowStates.mockRejectedValue(new Error('Team not found')); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + await expect( + caller.linearTeamDetailsByProject({ projectId: 'proj-1', teamId: 'team-1' }), + ).rejects.toMatchObject({ code: 'BAD_REQUEST' }); + }); + }); + // ── verifySentry ───────────────────────────────────────────────────── describe('verifySentry', () => { From 09ee1a7d6e96589b5a442248c80875dbd2edfc5b Mon Sep 17 00:00:00 2001 From: aaight Date: Wed, 15 Apr 2026 00:04:50 +0200 Subject: [PATCH 36/52] feat(reactions): wire up Linear reaction for Comment.create webhook events (#1106) Co-authored-by: Cascade Bot --- src/router/reactions.ts | 33 ++++++++-- tests/unit/router/reactions.test.ts | 96 +++++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 5 deletions(-) diff --git a/src/router/reactions.ts b/src/router/reactions.ts index 23a1d8df..e47e8085 100644 --- a/src/router/reactions.ts +++ b/src/router/reactions.ts @@ -9,7 +9,9 @@ */ import { 
getProjectGitHubToken } from '../config/projects.js'; +import { getIntegrationCredential } from '../config/provider.js'; import { isCascadeBot, type PersonaIdentities } from '../github/personas.js'; +import { linearClient, withLinearCredentials } from '../linear/client.js'; import { trelloClient, withTrelloCredentials } from '../trello/client.js'; import type { ProjectConfig } from '../types/index.js'; import { logger } from '../utils/logging.js'; @@ -160,10 +162,31 @@ async function sendJiraReaction(projectId: string, payload: unknown): Promise { - // Linear does not support emoji reactions on comments via the same API pattern - // as Trello/JIRA. This is a no-op placeholder for API consistency. - logger.info('[Reactions] Linear reaction skipped (not supported via webhook API)'); +async function sendLinearReaction(projectId: string, payload: unknown): Promise { + // Only react to Comment.create events + const p = payload as Record; + if (p.type !== 'Comment' || p.action !== 'create') return; + + const data = p.data as Record | undefined; + const commentId = data?.id as string | undefined; + if (!commentId) return; + + let apiKey: string; + try { + apiKey = await getIntegrationCredential(projectId, 'pm', 'api_key'); + } catch { + logger.warn('[Reactions] Missing Linear credentials, skipping reaction'); + return; + } + + try { + await withLinearCredentials({ apiKey }, async () => { + await linearClient.createReaction(commentId, '👀'); + }); + logger.info('[Reactions] Linear reaction sent for comment:', commentId); + } catch (err) { + logger.warn('[Reactions] Linear reaction failed:', String(err)); + } } // --------------------------------------------------------------------------- @@ -172,7 +195,7 @@ async function sendLinearReaction(_projectId: string, _payload: unknown): Promis /** * Send an acknowledgment reaction for an incoming webhook. - * Dispatches to Trello (👀), GitHub (👀), JIRA (💭), or Linear (no-op) based on source. 
+ * Dispatches to Trello (👀), GitHub (👀), JIRA (💭), or Linear (👀) based on source. * * For GitHub, pass `repoFullName` as the `projectId` parameter, along with * `personaIdentities` and the already-resolved `project`. The reaction is diff --git a/tests/unit/router/reactions.test.ts b/tests/unit/router/reactions.test.ts index 3fc0f55e..c23b7917 100644 --- a/tests/unit/router/reactions.test.ts +++ b/tests/unit/router/reactions.test.ts @@ -33,6 +33,14 @@ vi.mock('../../../src/trello/client.js', () => ({ }, })); +// Mock linear client +vi.mock('../../../src/linear/client.js', () => ({ + withLinearCredentials: vi.fn(async (_creds: unknown, fn: () => Promise) => fn()), + linearClient: { + createReaction: vi.fn(), + }, +})); + // Mock logger vi.mock('../../../src/utils/logging.js', () => ({ logger: { @@ -50,6 +58,7 @@ import { getIntegrationCredential, } from '../../../src/config/provider.js'; import type { PersonaIdentities } from '../../../src/github/personas.js'; +import { linearClient, withLinearCredentials } from '../../../src/linear/client.js'; import { _resetJiraCloudIdCache, sendAcknowledgeReaction } from '../../../src/router/reactions.js'; import { trelloClient, withTrelloCredentials } from '../../../src/trello/client.js'; import type { ProjectConfig } from '../../../src/types/index.js'; @@ -61,6 +70,8 @@ const mockFindProjectByRepo = vi.mocked(findProjectByRepo); const mockFindProjectById = vi.mocked(findProjectById); const mockAddActionReaction = vi.mocked(trelloClient.addActionReaction); const mockWithTrelloCredentials = vi.mocked(withTrelloCredentials); +const mockCreateReaction = vi.mocked(linearClient.createReaction); +const mockWithLinearCredentials = vi.mocked(withLinearCredentials); const mockLogger = vi.mocked(logger); // Mock global fetch @@ -146,6 +157,9 @@ describe('sendAcknowledgeReaction', () => { mockAddActionReaction.mockReset(); mockWithTrelloCredentials.mockReset(); mockWithTrelloCredentials.mockImplementation(async (_creds, fn) => fn()); + 
mockCreateReaction.mockReset(); + mockWithLinearCredentials.mockReset(); + mockWithLinearCredentials.mockImplementation(async (_creds, fn) => fn()); _resetJiraCloudIdCache(); mockLogger.info.mockReset(); mockLogger.warn.mockReset(); @@ -601,6 +615,88 @@ describe('sendAcknowledgeReaction', () => { }); }); + // ------------------------------------------------------------------------- + // Linear + // ------------------------------------------------------------------------- + + describe('Linear reactions', () => { + const LINEAR_COMMENT_PAYLOAD = { + type: 'Comment', + action: 'create', + data: { id: 'comment-linear-123' }, + }; + + it('sends 👀 reaction for Comment.create event', async () => { + mockCreateReaction.mockResolvedValueOnce({ + id: 'reaction-1', + emoji: '👀', + user: null, + createdAt: '2026-01-01T00:00:00Z', + }); + + await sendAcknowledgeReaction('linear', PROJECT_ID, LINEAR_COMMENT_PAYLOAD); + + expect(mockCreateReaction).toHaveBeenCalledOnce(); + expect(mockCreateReaction).toHaveBeenCalledWith('comment-linear-123', '👀'); + }); + + it('skips reaction for non-comment Linear events (e.g. Issue.update)', async () => { + const payload = { type: 'Issue', action: 'update', data: { id: 'issue-abc' } }; + + await sendAcknowledgeReaction('linear', PROJECT_ID, payload); + + expect(mockCreateReaction).not.toHaveBeenCalled(); + }); + + it('skips reaction for Comment events that are not create (e.g. 
Comment.update)', async () => { + const payload = { type: 'Comment', action: 'update', data: { id: 'comment-xyz' } }; + + await sendAcknowledgeReaction('linear', PROJECT_ID, payload); + + expect(mockCreateReaction).not.toHaveBeenCalled(); + }); + + it('skips reaction when Linear credentials are missing and logs warning', async () => { + mockGetIntegrationCredential.mockRejectedValue(new Error('Credential not found')); + + await sendAcknowledgeReaction('linear', PROJECT_ID, LINEAR_COMMENT_PAYLOAD); + + expect(mockCreateReaction).not.toHaveBeenCalled(); + expect(mockLogger.warn).toHaveBeenCalledWith( + expect.stringContaining('Missing Linear credentials'), + ); + }); + + it('does not throw when Linear credentials are missing', async () => { + mockGetIntegrationCredential.mockRejectedValue(new Error('Credential not found')); + + await expect( + sendAcknowledgeReaction('linear', PROJECT_ID, LINEAR_COMMENT_PAYLOAD), + ).resolves.toBeUndefined(); + }); + + it('logs warning on Linear API error but does not throw', async () => { + mockCreateReaction.mockRejectedValueOnce(new Error('Linear API error: 403')); + + await expect( + sendAcknowledgeReaction('linear', PROJECT_ID, LINEAR_COMMENT_PAYLOAD), + ).resolves.toBeUndefined(); + + expect(mockLogger.warn).toHaveBeenCalledWith( + expect.stringContaining('Linear reaction failed'), + expect.stringContaining('403'), + ); + }); + + it('skips reaction when comment id is missing from payload data', async () => { + const payload = { type: 'Comment', action: 'create', data: {} }; + + await sendAcknowledgeReaction('linear', PROJECT_ID, payload); + + expect(mockCreateReaction).not.toHaveBeenCalled(); + }); + }); + // ------------------------------------------------------------------------- // Error handling (top-level) // ------------------------------------------------------------------------- From 946fc5ed1adec445590e2c41a0da59b19bb1ae8e Mon Sep 17 00:00:00 2001 From: aaight Date: Wed, 15 Apr 2026 00:20:25 +0200 Subject: [PATCH 
37/52] feat(wizard): add Linear as PM provider option in dashboard wizard (#1107) Co-authored-by: Cascade Bot --- .../projects/pm-wizard-common-steps.tsx | 76 ++++++++ .../components/projects/pm-wizard-hooks.ts | 148 +++++++++++++- .../projects/pm-wizard-linear-steps.tsx | 184 ++++++++++++++++++ .../components/projects/pm-wizard-state.ts | 99 +++++++++- web/src/components/projects/pm-wizard.tsx | 42 +++- 5 files changed, 531 insertions(+), 18 deletions(-) create mode 100644 web/src/components/projects/pm-wizard-linear-steps.tsx diff --git a/web/src/components/projects/pm-wizard-common-steps.tsx b/web/src/components/projects/pm-wizard-common-steps.tsx index 2c97c272..6032faef 100644 --- a/web/src/components/projects/pm-wizard-common-steps.tsx +++ b/web/src/components/projects/pm-wizard-common-steps.tsx @@ -57,6 +57,71 @@ function CopyButton({ text }: { text: string }) { ); } +// ============================================================================ +// LinearWebhookInfoPanel +// ============================================================================ + +export function LinearWebhookInfoPanel({ webhookUrl }: { webhookUrl: string }) { + return ( +
+
+
+ +
+

+ Manual Webhook Setup Required +

+

+ Linear webhooks must be configured manually in your Linear team settings. CASCADE + cannot create them programmatically. +

+
+
+
+ +
+ +
+ {webhookUrl} + +
+
+ +
+

Setup instructions:

+
    +
  1. + Go to{' '} + + linear.app/settings/api + {' '} + and navigate to Webhooks +
  2. +
  3. Click "New webhook" and enter the URL above
  4. +
  5. + Enable events: Issues (created, updated, removed) +
  6. +
  7. Select your team and save — webhooks are team-scoped in Linear
  8. +
  9. + Optionally set a webhook secret and store it as{' '} + LINEAR_WEBHOOK_SECRET in + project credentials +
  10. +
+
+
+ ); +} + +// ============================================================================ +// WebhookStep +// ============================================================================ + export function WebhookStep({ state, webhooksQuery, @@ -64,6 +129,7 @@ export function WebhookStep({ callbackBaseUrl, createWebhookMutation, deleteWebhookMutation, + linearWebhookUrl, }: { state: WizardState; webhooksQuery: WebhooksQueryProps; @@ -71,7 +137,17 @@ export function WebhookStep({ callbackBaseUrl: string; createWebhookMutation: UseMutationResult; deleteWebhookMutation: UseMutationResult; + linearWebhookUrl?: string; }) { + // Linear uses a display-only panel — no create/delete buttons + if (state.provider === 'linear') { + return ( + + ); + } + const isTrello = state.provider === 'trello'; const providerName = isTrello ? 'Trello' : 'JIRA'; diff --git a/web/src/components/projects/pm-wizard-hooks.ts b/web/src/components/projects/pm-wizard-hooks.ts index 591ea0f8..836f1709 100644 --- a/web/src/components/projects/pm-wizard-hooks.ts +++ b/web/src/components/projects/pm-wizard-hooks.ts @@ -7,7 +7,12 @@ import { useMutation, useQueryClient } from '@tanstack/react-query'; import { useEffect } from 'react'; import { API_URL } from '@/lib/api.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; -import type { WizardAction, WizardState } from './pm-wizard-state.js'; +import type { + LinearTeamDetails, + LinearTeamOption, + WizardAction, + WizardState, +} from './pm-wizard-state.js'; // ============================================================================ // Trello Discovery @@ -187,6 +192,99 @@ export function useJiraDiscovery( return { jiraProjectsMutation, jiraDetailsMutation, handleProjectSelect }; } +// ============================================================================ +// Linear Discovery +// ============================================================================ + +export function useLinearDiscovery( + state: WizardState, + dispatch: 
React.Dispatch, + advanceToStep: (step: number) => void, + projectId: string, +) { + const linearTeamsMutation = useMutation({ + mutationFn: () => { + if (state.isEditing && state.hasStoredCredentials && !state.linearApiKey) { + return trpcClient.integrationsDiscovery.linearTeamsByProject.mutate({ projectId }); + } + if (!state.linearApiKey) { + throw new Error('Enter your API key before fetching teams'); + } + return trpcClient.integrationsDiscovery.linearTeams.mutate({ + apiKey: state.linearApiKey, + }); + }, + onSuccess: (teams) => + dispatch({ + type: 'SET_LINEAR_TEAMS', + teams: teams as LinearTeamOption[], + }), + }); + + const linearDetailsMutation = useMutation({ + mutationFn: (teamId: string) => { + if (state.isEditing && state.hasStoredCredentials && !state.linearApiKey) { + return trpcClient.integrationsDiscovery.linearTeamDetailsByProject.mutate({ + projectId, + teamId, + }); + } + if (!state.linearApiKey) { + throw new Error('Enter your API key before fetching team details'); + } + return trpcClient.integrationsDiscovery.linearTeamDetails.mutate({ + apiKey: state.linearApiKey, + teamId, + }); + }, + onSuccess: (details) => { + dispatch({ + type: 'SET_LINEAR_TEAM_DETAILS', + details: details as LinearTeamDetails, + }); + advanceToStep(4); + }, + }); + + const handleTeamSelect = (teamId: string) => { + dispatch({ type: 'SET_LINEAR_TEAM_ID', id: teamId }); + if (teamId) { + linearDetailsMutation.mutate(teamId); + } + }; + + // Auto-fetch teams when verification result changes + // biome-ignore lint/correctness/useExhaustiveDependencies: intentionally trigger only on verification result change + useEffect(() => { + if (!state.verificationResult || state.provider !== 'linear') return; + if (state.linearTeams.length === 0 && !linearTeamsMutation.isPending) { + linearTeamsMutation.mutate(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [state.verificationResult]); + + // In edit mode, auto-fetch team list and details + // biome-ignore 
lint/correctness/useExhaustiveDependencies: intentionally trigger on edit mode and stored creds + useEffect(() => { + if (!state.isEditing || state.provider !== 'linear') return; + const canFetch = state.linearApiKey ? true : state.hasStoredCredentials; + if (canFetch && state.linearTeams.length === 0 && !linearTeamsMutation.isPending) { + linearTeamsMutation.mutate(); + } + if ( + state.linearTeamId && + !state.linearTeamDetails && + canFetch && + !linearDetailsMutation.isPending + ) { + linearDetailsMutation.mutate(state.linearTeamId); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [state.isEditing, state.linearTeamId, state.hasStoredCredentials]); + + return { linearTeamsMutation, linearDetailsMutation, handleTeamSelect }; +} + // ============================================================================ // Verification // ============================================================================ @@ -209,6 +307,15 @@ export function useVerification( }); return { provider: 'trello' as const, result }; } + if (provider === 'linear') { + if (!state.linearApiKey) { + throw new Error('Enter your API key before verifying'); + } + const result = await trpcClient.integrationsDiscovery.verifyLinear.mutate({ + apiKey: state.linearApiKey, + }); + return { provider: 'linear' as const, result }; + } if (!state.jiraEmail || !state.jiraApiToken) { throw new Error('Enter both credentials before verifying'); } @@ -228,6 +335,12 @@ export function useVerification( type: 'SET_VERIFICATION', result: { provider: 'trello', display: `@${r.username} (${r.fullName})` }, }); + } else if (provider === 'linear') { + const r = result as { name: string; displayName: string }; + dispatch({ + type: 'SET_VERIFICATION', + result: { provider: 'linear', display: r.displayName || r.name }, + }); } else { const r = result as { displayName: string; emailAddress: string }; dispatch({ @@ -296,6 +409,22 @@ export function useWebhookManagement(projectId: string, state: 
WizardState) { }; } +// ============================================================================ +// Linear Webhook Info (display-only) +// ============================================================================ + +export function useLinearWebhookInfo() { + const callbackBaseUrl = + API_URL || + (typeof window !== 'undefined' ? window.location.origin.replace(':5173', ':3000') : ''); + + const webhookUrl = callbackBaseUrl + ? `${callbackBaseUrl}/linear/webhook` + : '/linear/webhook'; + + return { webhookUrl }; +} + // ============================================================================ // Trello Label Creation // ============================================================================ @@ -454,7 +583,7 @@ export function useSaveMutation(projectId: string, state: WizardState) { const queryClient = useQueryClient(); const saveMutation = useMutation({ - // biome-ignore lint/complexity/noExcessiveCognitiveComplexity: handles two provider types + credential persisting + // biome-ignore lint/complexity/noExcessiveCognitiveComplexity: handles three provider types + credential persisting mutationFn: async () => { let config: Record; if (state.provider === 'trello') { @@ -464,6 +593,12 @@ export function useSaveMutation(projectId: string, state: WizardState) { labels: state.trelloLabelMappings, ...(state.trelloCostFieldId ? { customFields: { cost: state.trelloCostFieldId } } : {}), }; + } else if (state.provider === 'linear') { + config = { + teamId: state.linearTeamId, + statuses: state.linearStatusMappings, + ...(Object.keys(state.linearLabels).length > 0 ? 
{ labels: state.linearLabels } : {}), + }; } else { config = { projectKey: state.jiraProjectKey, @@ -502,6 +637,15 @@ export function useSaveMutation(projectId: string, state: WizardState) { name: 'Trello Token', }); } + } else if (state.provider === 'linear') { + if (state.linearApiKey) { + await trpcClient.projects.credentials.set.mutate({ + projectId, + envVarKey: 'LINEAR_API_KEY', + value: state.linearApiKey, + name: 'Linear API Key', + }); + } } else { if (state.jiraEmail) { await trpcClient.projects.credentials.set.mutate({ diff --git a/web/src/components/projects/pm-wizard-linear-steps.tsx b/web/src/components/projects/pm-wizard-linear-steps.tsx new file mode 100644 index 00000000..e07c406f --- /dev/null +++ b/web/src/components/projects/pm-wizard-linear-steps.tsx @@ -0,0 +1,184 @@ +/** + * Linear-specific step renderer components for PMWizard. + */ + +import type { UseMutationResult } from '@tanstack/react-query'; +import { CheckCircle2, Loader2 } from 'lucide-react'; +import { Input } from '@/components/ui/input.js'; +import { Label } from '@/components/ui/label.js'; +import type { WizardAction, WizardState } from './pm-wizard-state.js'; +import { FieldMappingRow, SearchableSelect } from './wizard-shared.js'; + +// ============================================================================ +// Slot definitions +// ============================================================================ + +const LINEAR_STATUS_SLOTS = ['backlog', 'inProgress', 'inReview', 'done']; + +const LINEAR_LABEL_SLOTS = ['processing', 'processed', 'error', 'readyToProcess', 'auto']; + +// ============================================================================ +// LinearCredentialsStep +// ============================================================================ + +export function LinearCredentialsStep({ + state, + dispatch, +}: { + state: WizardState; + dispatch: React.Dispatch; +}) { + return ( +
+ {state.isEditing && state.hasStoredCredentials && !state.linearApiKey && ( +
+ + Credentials stored — enter new values below to replace them. +
+ )} +

+ Enter your Linear API key. This will be saved securely to the project. +

+
+ + dispatch({ type: 'SET_LINEAR_API_KEY', value: e.target.value })} + placeholder="lin_api_..." + autoComplete="off" + /> +

+ Generate a Personal API key at{' '} + + linear.app/settings/api + +

+
+
+ ); +} + +// ============================================================================ +// LinearTeamStep +// ============================================================================ + +export function LinearTeamStep({ + state, + onTeamSelect, + linearTeamsMutation, + linearDetailsMutation, +}: { + state: WizardState; + onTeamSelect: (id: string) => void; + linearTeamsMutation: UseMutationResult; + linearDetailsMutation: UseMutationResult; +}) { + return ( +
+ + ({ + label: t.name, + value: t.id, + detail: t.key, + }))} + value={state.linearTeamId} + onChange={onTeamSelect} + placeholder="Select a Linear team..." + isLoading={linearTeamsMutation.isPending} + error={linearTeamsMutation.isError ? (linearTeamsMutation.error as Error).message : null} + onRetry={() => + (linearTeamsMutation as UseMutationResult).mutate() + } + /> + {state.linearTeamId && linearDetailsMutation.isPending && ( +
+ Loading team details... +
+ )} +
+ ); +} + +// ============================================================================ +// LinearFieldMappingStep +// ============================================================================ + +export function LinearFieldMappingStep({ + state, + dispatch, +}: { + state: WizardState; + dispatch: React.Dispatch; +}) { + return ( +
+ {/* Status mappings */} +
+ +

+ Map each CASCADE status to a Linear workflow state in the team. +

+ {state.linearTeamDetails ? ( + LINEAR_STATUS_SLOTS.map((slot) => ( + ({ + label: s.name, + value: s.name, + })) ?? [] + } + value={state.linearStatusMappings[slot] ?? ''} + onChange={(v) => + dispatch({ + type: 'SET_LINEAR_STATUS_MAPPING', + key: slot, + value: v, + }) + } + manualFallback + /> + )) + ) : ( +

+ Select a team first to populate status options. +

+ )} +
+ + {/* Labels */} +
+ +

+ CASCADE label names used in Linear. These are created automatically by CASCADE. +

+ {LINEAR_LABEL_SLOTS.map((slot) => ( +
+ {slot} + + dispatch({ + type: 'SET_LINEAR_LABEL', + key: slot, + value: e.target.value, + }) + } + placeholder={`Linear label for ${slot}`} + className="flex-1" + /> +
+ ))} +
+
+ ); +} diff --git a/web/src/components/projects/pm-wizard-state.ts b/web/src/components/projects/pm-wizard-state.ts index 8be4e6d3..1a6bbe3d 100644 --- a/web/src/components/projects/pm-wizard-state.ts +++ b/web/src/components/projects/pm-wizard-state.ts @@ -31,7 +31,18 @@ export interface JiraProjectDetails { fields: Array<{ id: string; name: string; custom: boolean }>; } -export type Provider = 'trello' | 'jira'; +export interface LinearTeamOption { + id: string; + name: string; + key: string; +} + +export interface LinearTeamDetails { + states: Array<{ id: string; name: string; type: string }>; + labels: Array<{ id: string; name: string; color: string }>; +} + +export type Provider = 'trello' | 'jira' | 'linear'; export interface WizardState { provider: Provider; @@ -41,6 +52,7 @@ export interface WizardState { jiraEmail: string; jiraApiToken: string; jiraBaseUrl: string; + linearApiKey: string; verificationResult: { provider: Provider; display: string } | null; verifyError: string | null; // Step 3: Board/Project @@ -48,9 +60,12 @@ export interface WizardState { trelloBoards: TrelloBoardOption[]; jiraProjectKey: string; jiraProjects: JiraProjectOption[]; + linearTeamId: string; + linearTeams: LinearTeamOption[]; // Step 4: Field mapping trelloBoardDetails: TrelloBoardDetails | null; jiraProjectDetails: JiraProjectDetails | null; + linearTeamDetails: LinearTeamDetails | null; // Trello mappings trelloListMappings: Record; trelloLabelMappings: Record; @@ -60,6 +75,9 @@ export interface WizardState { jiraIssueTypes: Record; jiraLabels: Record; jiraCostFieldId: string; + // Linear mappings + linearStatusMappings: Record; + linearLabels: Record; // Editing mode isEditing: boolean; hasStoredCredentials: boolean; // true in edit mode when provider credentials exist in project_credentials @@ -72,6 +90,7 @@ export type WizardAction = | { type: 'SET_JIRA_EMAIL'; value: string } | { type: 'SET_JIRA_API_TOKEN'; value: string } | { type: 'SET_JIRA_BASE_URL'; url: string } + 
| { type: 'SET_LINEAR_API_KEY'; value: string } | { type: 'SET_VERIFICATION'; result: { provider: Provider; display: string } | null; @@ -81,6 +100,9 @@ export type WizardAction = | { type: 'SET_TRELLO_BOARD_ID'; id: string } | { type: 'SET_JIRA_PROJECTS'; projects: JiraProjectOption[] } | { type: 'SET_JIRA_PROJECT_KEY'; key: string } + | { type: 'SET_LINEAR_TEAMS'; teams: LinearTeamOption[] } + | { type: 'SET_LINEAR_TEAM_ID'; id: string } + | { type: 'SET_LINEAR_TEAM_DETAILS'; details: LinearTeamDetails | null } | { type: 'SET_TRELLO_BOARD_DETAILS'; details: TrelloBoardDetails | null } | { type: 'SET_JIRA_PROJECT_DETAILS'; details: JiraProjectDetails | null } | { type: 'SET_TRELLO_LIST_MAPPING'; key: string; value: string } @@ -90,6 +112,8 @@ export type WizardAction = | { type: 'SET_JIRA_ISSUE_TYPE'; key: string; value: string } | { type: 'SET_JIRA_LABEL'; key: string; value: string } | { type: 'SET_JIRA_COST_FIELD'; id: string } + | { type: 'SET_LINEAR_STATUS_MAPPING'; key: string; value: string } + | { type: 'SET_LINEAR_LABEL'; key: string; value: string } | { type: 'INIT_EDIT'; state: Partial } | { type: 'ADD_TRELLO_BOARD_LABEL'; label: { id: string; name: string; color: string } } | { @@ -110,6 +134,14 @@ export const INITIAL_JIRA_LABELS: Record = { auto: 'cascade-auto', }; +export const INITIAL_LINEAR_LABELS: Record = { + processing: 'cascade-processing', + processed: 'cascade-processed', + error: 'cascade-error', + readyToProcess: 'cascade-ready', + auto: 'cascade-auto', +}; + export function createInitialState(): WizardState { return { provider: 'trello', @@ -118,14 +150,18 @@ export function createInitialState(): WizardState { jiraEmail: '', jiraApiToken: '', jiraBaseUrl: '', + linearApiKey: '', verificationResult: null, verifyError: null, trelloBoardId: '', trelloBoards: [], jiraProjectKey: '', jiraProjects: [], + linearTeamId: '', + linearTeams: [], trelloBoardDetails: null, jiraProjectDetails: null, + linearTeamDetails: null, trelloListMappings: {}, 
trelloLabelMappings: {}, trelloCostFieldId: '', @@ -133,6 +169,8 @@ export function createInitialState(): WizardState { jiraIssueTypes: {}, jiraLabels: { ...INITIAL_JIRA_LABELS }, jiraCostFieldId: '', + linearStatusMappings: {}, + linearLabels: { ...INITIAL_LINEAR_LABELS }, isEditing: false, hasStoredCredentials: false, }; @@ -179,6 +217,13 @@ export const wizardReducer: Reducer = (state, action) }; case 'SET_JIRA_BASE_URL': return { ...state, jiraBaseUrl: action.url, verificationResult: null, verifyError: null }; + case 'SET_LINEAR_API_KEY': + return { + ...state, + linearApiKey: action.value, + verificationResult: null, + verifyError: null, + }; case 'SET_VERIFICATION': return { ...state, verificationResult: action.result, verifyError: action.error ?? null }; case 'SET_TRELLO_BOARDS': @@ -203,6 +248,17 @@ export const wizardReducer: Reducer = (state, action) jiraIssueTypes: {}, jiraCostFieldId: '', }; + case 'SET_LINEAR_TEAMS': + return { ...state, linearTeams: action.teams }; + case 'SET_LINEAR_TEAM_ID': + return { + ...state, + linearTeamId: action.id, + linearTeamDetails: null, + linearStatusMappings: {}, + }; + case 'SET_LINEAR_TEAM_DETAILS': + return { ...state, linearTeamDetails: action.details }; case 'SET_TRELLO_BOARD_DETAILS': return { ...state, trelloBoardDetails: action.details }; case 'SET_JIRA_PROJECT_DETAILS': @@ -236,6 +292,16 @@ export const wizardReducer: Reducer = (state, action) }; case 'SET_JIRA_COST_FIELD': return { ...state, jiraCostFieldId: action.id }; + case 'SET_LINEAR_STATUS_MAPPING': + return { + ...state, + linearStatusMappings: { ...state.linearStatusMappings, [action.key]: action.value }, + }; + case 'SET_LINEAR_LABEL': + return { + ...state, + linearLabels: { ...state.linearLabels, [action.key]: action.value }, + }; case 'INIT_EDIT': return { ...state, ...action.state, isEditing: true }; case 'ADD_TRELLO_BOARD_LABEL': @@ -323,6 +389,16 @@ export function buildEditState( editState.hasStoredCredentials = 
configuredKeys.has('JIRA_EMAIL') && configuredKeys.has('JIRA_API_TOKEN'); + } else if (provider === 'linear') { + editState.linearTeamId = (initialConfig.teamId as string) ?? ''; + + const statuses = initialConfig.statuses as Record | undefined; + if (statuses) editState.linearStatusMappings = statuses; + + const labels = initialConfig.labels as Record | undefined; + if (labels) editState.linearLabels = labels; + + editState.hasStoredCredentials = configuredKeys.has('LINEAR_API_KEY'); } return editState; @@ -341,22 +417,27 @@ export function isStep2Complete(state: WizardState): boolean { const credsReady = state.provider === 'trello' ? !!(state.trelloApiKey && state.trelloToken) - : !!(state.jiraEmail && state.jiraApiToken && state.jiraBaseUrl); + : state.provider === 'jira' + ? !!(state.jiraEmail && state.jiraApiToken && state.jiraBaseUrl) + : !!state.linearApiKey; return credsReady && !!state.verificationResult; } export function isStep3Complete(state: WizardState): boolean { - return state.provider === 'trello' ? !!state.trelloBoardId : !!state.jiraProjectKey; + if (state.provider === 'trello') return !!state.trelloBoardId; + if (state.provider === 'jira') return !!state.jiraProjectKey; + return !!state.linearTeamId; } export function isStep4Complete(state: WizardState): boolean { - return state.provider === 'trello' - ? Object.keys(state.trelloListMappings).length > 0 - : Object.keys(state.jiraStatusMappings).length > 0; + if (state.provider === 'trello') return Object.keys(state.trelloListMappings).length > 0; + if (state.provider === 'jira') return Object.keys(state.jiraStatusMappings).length > 0; + return Object.keys(state.linearStatusMappings).length > 0; } export function areCredentialsReady(state: WizardState): boolean { - return state.provider === 'trello' - ? 
!!(state.trelloApiKey && state.trelloToken) - : !!(state.jiraEmail && state.jiraApiToken && state.jiraBaseUrl); + if (state.provider === 'trello') return !!(state.trelloApiKey && state.trelloToken); + if (state.provider === 'jira') + return !!(state.jiraEmail && state.jiraApiToken && state.jiraBaseUrl); + return !!state.linearApiKey; } diff --git a/web/src/components/projects/pm-wizard.tsx b/web/src/components/projects/pm-wizard.tsx index 3fab7918..7a9c6c49 100644 --- a/web/src/components/projects/pm-wizard.tsx +++ b/web/src/components/projects/pm-wizard.tsx @@ -7,6 +7,8 @@ import { SaveStep, WebhookStep } from './pm-wizard-common-steps.js'; import { useJiraCustomFieldCreation, useJiraDiscovery, + useLinearDiscovery, + useLinearWebhookInfo, useSaveMutation, useTrelloCustomFieldCreation, useTrelloDiscovery, @@ -19,6 +21,11 @@ import { JiraFieldMappingStep, JiraProjectStep, } from './pm-wizard-jira-steps.js'; +import { + LinearCredentialsStep, + LinearFieldMappingStep, + LinearTeamStep, +} from './pm-wizard-linear-steps.js'; import { areCredentialsReady, buildEditState, @@ -122,6 +129,12 @@ export function PMWizard({ advanceToStep, projectId, ); + const { linearTeamsMutation, linearDetailsMutation, handleTeamSelect } = useLinearDiscovery( + state, + dispatch, + advanceToStep, + projectId, + ); const { createLabelMutation, createMissingLabelsMutation } = useTrelloLabelCreation( state, dispatch, @@ -129,6 +142,7 @@ export function PMWizard({ const { createCustomFieldMutation } = useTrelloCustomFieldCreation(state, dispatch); const { createJiraCustomFieldMutation } = useJiraCustomFieldCreation(state, dispatch); const webhookManagement = useWebhookManagement(projectId, state); + const { webhookUrl: linearWebhookUrl } = useLinearWebhookInfo(); const { saveMutation } = useSaveMutation(projectId, state); // ---- Label creation handlers ---- @@ -198,11 +212,13 @@ export function PMWizard({ url: w.callbackURL, active: w.active, })) - : (webhooksQuery.data?.jira ?? 
[]).map((w) => ({ - id: String(w.id), - url: w.url, - active: w.enabled, - })); + : state.provider === 'jira' + ? (webhooksQuery.data?.jira ?? []).map((w) => ({ + id: String(w.id), + url: w.url, + active: w.enabled, + })) + : []; // Linear: webhooks are configured manually // ---- Render ---- @@ -219,7 +235,7 @@ export function PMWizard({
- {(['trello', 'jira'] as const).map((p) => ( + {(['trello', 'jira', 'linear'] as const).map((p) => ( ))}
@@ -251,6 +267,8 @@ export function PMWizard({ > {state.provider === 'trello' ? ( + ) : state.provider === 'linear' ? ( + ) : ( )} @@ -301,6 +319,13 @@ export function PMWizard({ boardsMutation={boardsMutation} boardDetailsMutation={boardDetailsMutation} /> + ) : state.provider === 'linear' ? ( + ) : ( + ) : state.provider === 'linear' ? ( + ) : ( From 4fc794b7eadec5d1263ca32dd1b62a12a135c46a Mon Sep 17 00:00:00 2001 From: aaight Date: Wed, 15 Apr 2026 00:34:09 +0200 Subject: [PATCH 38/52] feat(webhooks): add Linear webhook info to webhooks dashboard and CLI tool (#1108) Co-authored-by: Cascade Bot --- src/api/routers/webhooks.ts | 26 +++- src/api/routers/webhooks/context.ts | 5 + src/api/routers/webhooks/types.ts | 8 + src/cli/dashboard/webhooks/create.ts | 17 ++ src/cli/dashboard/webhooks/list.ts | 10 ++ tests/unit/api/routers/webhooks.test.ts | 196 ++++++++++++++++++++++++ tools/setup-webhooks.ts | 59 ++++++- 7 files changed, 315 insertions(+), 6 deletions(-) diff --git a/src/api/routers/webhooks.ts b/src/api/routers/webhooks.ts index 1aa412c4..4c4096ae 100644 --- a/src/api/routers/webhooks.ts +++ b/src/api/routers/webhooks.ts @@ -16,11 +16,12 @@ import { trelloCreateWebhook, trelloDeleteWebhook, trelloListWebhooks } from './ import type { GitHubWebhook, JiraWebhookInfo, + LinearWebhookInfo, SentryWebhookInfo, TrelloWebhook, } from './webhooks/types.js'; -export type { GitHubWebhook, JiraWebhookInfo, SentryWebhookInfo, TrelloWebhook }; +export type { GitHubWebhook, JiraWebhookInfo, LinearWebhookInfo, SentryWebhookInfo, TrelloWebhook }; export const webhooksRouter = router({ list: adminProcedure @@ -52,15 +53,28 @@ export const webhooksRouter = router({ }; } + // Linear — informational only (webhooks must be configured in Linear team settings) + let linear: LinearWebhookInfo | null = null; + if (input.callbackBaseUrl && pctx.pmType === 'linear' && pctx.linearApiKey) { + const baseUrl = input.callbackBaseUrl.replace(/\/$/, ''); + linear = { + url: 
`${baseUrl}/linear/webhook`, + webhookSecretSet: pctx.linearWebhookSecretSet ?? false, + note: 'Configure this URL in your Linear team settings under API > Webhooks.', + }; + } + return { trello: trelloResult.status === 'fulfilled' ? trelloResult.value : [], github: githubResult.status === 'fulfilled' ? githubResult.value : [], jira: jiraResult.status === 'fulfilled' ? jiraResult.value : [], sentry, + linear, errors: { trello: trelloResult.status === 'rejected' ? String(trelloResult.reason) : null, github: githubResult.status === 'rejected' ? String(githubResult.reason) : null, jira: jiraResult.status === 'rejected' ? String(jiraResult.reason) : null, + linear: null, }, }; }), @@ -85,6 +99,7 @@ export const webhooksRouter = router({ github?: GitHubWebhook | string; jira?: JiraWebhookInfo | string; sentry?: SentryWebhookInfo; + linear?: LinearWebhookInfo; labelsEnsured?: string[]; } = {}; @@ -158,6 +173,15 @@ export const webhooksRouter = router({ }; } + // Linear — display-only (cannot create programmatically) + if (pctx.pmType === 'linear' && pctx.linearApiKey) { + results.linear = { + url: `${baseUrl}/linear/webhook`, + webhookSecretSet: pctx.linearWebhookSecretSet ?? 
false, + note: 'Configure this URL manually in your Linear team settings under API > Webhooks.', + }; + } + return results; }), diff --git a/src/api/routers/webhooks/context.ts b/src/api/routers/webhooks/context.ts index 0d3a8636..b4acc193 100644 --- a/src/api/routers/webhooks/context.ts +++ b/src/api/routers/webhooks/context.ts @@ -7,6 +7,7 @@ import { getJiraConfig, getTrelloConfig } from '../../../pm/config.js'; import { verifyProjectOrgAccess } from '../_shared/projectAccess.js'; import type { ProjectContext } from './types.js'; +// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: multi-provider credential resolution export async function resolveProjectContext( projectId: string, userOrgId: string, @@ -55,6 +56,8 @@ export async function resolveProjectContext( webhookSecret: creds.GITHUB_WEBHOOK_SECRET ?? undefined, sentryConfigured, sentryWebhookSecretSet: !!creds.SENTRY_WEBHOOK_SECRET, + linearApiKey: creds.LINEAR_API_KEY ?? undefined, + linearWebhookSecretSet: !!creds.LINEAR_WEBHOOK_SECRET, }; } @@ -65,6 +68,7 @@ export const oneTimeTokensSchema = z trelloToken: z.string().optional(), jiraEmail: z.string().optional(), jiraApiToken: z.string().optional(), + linearApiKey: z.string().optional(), }) .optional(); @@ -77,4 +81,5 @@ export function applyOneTimeTokens(pctx: ProjectContext, tokens: OneTimeTokens): if (tokens.trelloToken) pctx.trelloToken = tokens.trelloToken; if (tokens.jiraEmail) pctx.jiraEmail = tokens.jiraEmail; if (tokens.jiraApiToken) pctx.jiraApiToken = tokens.jiraApiToken; + if (tokens.linearApiKey) pctx.linearApiKey = tokens.linearApiKey; } diff --git a/src/api/routers/webhooks/types.ts b/src/api/routers/webhooks/types.ts index b97f83cd..b3da303a 100644 --- a/src/api/routers/webhooks/types.ts +++ b/src/api/routers/webhooks/types.ts @@ -32,6 +32,12 @@ export interface SentryWebhookInfo { note: string; } +export interface LinearWebhookInfo { + url: string; + webhookSecretSet: boolean; + note: string; +} + export interface 
ProjectContext { projectId: string; orgId: string; @@ -49,4 +55,6 @@ export interface ProjectContext { webhookSecret?: string; sentryConfigured?: boolean; sentryWebhookSecretSet?: boolean; + linearApiKey?: string; + linearWebhookSecretSet?: boolean; } diff --git a/src/cli/dashboard/webhooks/create.ts b/src/cli/dashboard/webhooks/create.ts index fca76db1..e990aa3b 100644 --- a/src/cli/dashboard/webhooks/create.ts +++ b/src/cli/dashboard/webhooks/create.ts @@ -93,6 +93,23 @@ export default class WebhooksCreate extends DashboardCommand { this.log(' 5. Copy the Client Secret and save it as SENTRY_WEBHOOK_SECRET credential'); } } + + if (result.linear) { + this.log(''); + this.log('Linear (manual setup required):'); + this.log(` Webhook URL: ${result.linear.url}`); + this.log(` Webhook secret: ${result.linear.webhookSecretSet ? 'configured' : 'not set'}`); + this.log(' Steps:'); + this.log(' 1. Go to Linear > Settings > API > Webhooks'); + this.log(' 2. Click "New webhook"'); + this.log(' 3. Set the URL to the Webhook URL above'); + this.log(' 4. Select the desired event types (e.g. Issues, Comments)'); + if (!result.linear.webhookSecretSet) { + this.log( + ' 5. Copy the signing secret and save it as LINEAR_WEBHOOK_SECRET credential', + ); + } + } } catch (err) { this.handleError(err); } diff --git a/src/cli/dashboard/webhooks/list.ts b/src/cli/dashboard/webhooks/list.ts index 495f90cc..683c846e 100644 --- a/src/cli/dashboard/webhooks/list.ts +++ b/src/cli/dashboard/webhooks/list.ts @@ -92,6 +92,16 @@ export default class WebhooksList extends DashboardCommand { } else { this.log(' (not configured)'); } + + this.log(''); + this.log('Linear webhook:'); + if (result.linear) { + this.log(` URL: ${result.linear.url}`); + this.log(` Webhook secret: ${result.linear.webhookSecretSet ? 
'configured' : 'not set'}`); + this.log(` ${result.linear.note}`); + } else { + this.log(' (not configured)'); + } } catch (err) { this.handleError(err); } diff --git a/tests/unit/api/routers/webhooks.test.ts b/tests/unit/api/routers/webhooks.test.ts index 18b27735..c377d15d 100644 --- a/tests/unit/api/routers/webhooks.test.ts +++ b/tests/unit/api/routers/webhooks.test.ts @@ -102,6 +102,17 @@ const mockJiraProject = { }, }; +const mockLinearProject = { + id: 'linear-project', + orgId: 'org-1', + repo: 'owner/linear-repo', + pm: { type: 'linear' }, + linear: { + teamId: 'TEAM-123', + statuses: { todo: 'Todo', inProgress: 'In Progress' }, + }, +}; + function setupJiraProjectContext() { mockDbSelect.mockReturnValue({ from: mockDbFrom }); mockDbFrom.mockReturnValue({ where: mockDbWhere }); @@ -115,6 +126,24 @@ function setupJiraProjectContext() { }); } +function setupLinearProjectContext(opts?: { noLinearApiKey?: boolean; webhookSecret?: boolean }) { + mockDbSelect.mockReturnValue({ from: mockDbFrom }); + mockDbFrom.mockReturnValue({ where: mockDbWhere }); + mockDbWhere.mockResolvedValue([{ orgId: 'org-1' }]); + mockFindProjectByIdFromDb.mockResolvedValue(mockLinearProject); + mockGetIntegrationByProjectAndCategory.mockResolvedValue(null); + const creds: Record = { + GITHUB_TOKEN_IMPLEMENTER: 'ghp_test123', + }; + if (!opts?.noLinearApiKey) { + creds.LINEAR_API_KEY = 'lin_api_test123'; + } + if (opts?.webhookSecret) { + creds.LINEAR_WEBHOOK_SECRET = 'linear-secret-abc'; + } + mockGetAllProjectCredentials.mockResolvedValue(creds); +} + function setupProjectContext(opts?: { noTrello?: boolean; noGithub?: boolean; @@ -709,6 +738,7 @@ describe('webhooksRouter', () => { trello: null, github: null, jira: null, + linear: null, }); }); @@ -914,5 +944,171 @@ describe('webhooksRouter', () => { expect(result.errors.github).toBeNull(); expect(result.errors.jira).toBeNull(); }); + + it('list uses linearApiKey oneTimeToken to show Linear webhook info', async () => { + 
setupLinearProjectContext({ noLinearApiKey: true }); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.list({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + oneTimeTokens: { linearApiKey: 'lin_api_onetime' }, + }); + + expect(result.linear).not.toBeNull(); + expect(result.linear?.url).toBe('https://cascade.example.com/linear/webhook'); + }); + }); + + describe('Linear webhook info', () => { + it('list returns linear webhook info when project uses Linear PM and has linearApiKey', async () => { + setupLinearProjectContext(); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.list({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.linear).not.toBeNull(); + expect(result.linear?.url).toBe('https://cascade.example.com/linear/webhook'); + expect(result.linear?.webhookSecretSet).toBe(false); + expect(result.linear?.note).toContain('Linear'); + }); + + it('list returns linear webhook info with webhookSecretSet true when LINEAR_WEBHOOK_SECRET is set', async () => { + setupLinearProjectContext({ webhookSecret: true }); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.list({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.linear?.webhookSecretSet).toBe(true); + }); + + it('list returns null linear when project uses Linear PM but no linearApiKey', 
async () => { + setupLinearProjectContext({ noLinearApiKey: true }); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.list({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.linear).toBeNull(); + }); + + it('list returns null linear when no callbackBaseUrl is provided', async () => { + setupLinearProjectContext(); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.list({ + projectId: 'linear-project', + }); + + expect(result.linear).toBeNull(); + }); + + it('list errors object includes linear: null', async () => { + setupLinearProjectContext(); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.list({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.errors.linear).toBeNull(); + }); + + it('create returns linear webhook info for Linear PM projects', async () => { + setupLinearProjectContext(); + + mockListWebhooks.mockResolvedValue({ data: [] }); + mockCreateWebhook.mockResolvedValue({ + data: { + id: 1, + config: { url: 'http://example.com/github/webhook' }, + events: [], + active: true, + }, + }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.create({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.linear).not.toBeUndefined(); + 
expect(result.linear?.url).toBe('https://cascade.example.com/linear/webhook'); + expect(result.linear?.webhookSecretSet).toBe(false); + expect(result.linear?.note).toContain('Linear'); + }); + + it('create returns linear webhook info with webhookSecretSet true when LINEAR_WEBHOOK_SECRET is set', async () => { + setupLinearProjectContext({ webhookSecret: true }); + + mockListWebhooks.mockResolvedValue({ data: [] }); + mockCreateWebhook.mockResolvedValue({ + data: { + id: 1, + config: { url: 'http://example.com/github/webhook' }, + events: [], + active: true, + }, + }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.create({ + projectId: 'linear-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.linear?.webhookSecretSet).toBe(true); + }); + + it('create does not return linear info for non-Linear PM projects', async () => { + setupProjectContext(); + + mockFetch.mockResolvedValue({ ok: true, json: () => Promise.resolve([]) }); + mockListWebhooks.mockResolvedValue({ data: [] }); + mockCreateWebhook.mockResolvedValue({ + data: { + id: 1, + config: { url: 'http://example.com/github/webhook' }, + events: [], + active: true, + }, + }); + + const caller = createCaller({ user: mockUser, effectiveOrgId: mockUser.orgId }); + const result = await caller.create({ + projectId: 'my-project', + callbackBaseUrl: 'https://cascade.example.com', + }); + + expect(result.linear).toBeUndefined(); + }); }); }); diff --git a/tools/setup-webhooks.ts b/tools/setup-webhooks.ts index 59c676ae..bcca8fcc 100644 --- a/tools/setup-webhooks.ts +++ b/tools/setup-webhooks.ts @@ -53,11 +53,14 @@ interface ProjectContext { projectId: string; orgId: string; repo: string | null; + pmType: string; boardId: string; trelloApiKey: string; trelloToken: string; githubToken: string; webhookSecret?: string; + linearApiKey?: string; + linearWebhookSecretSet: boolean; } async function 
resolveProjectContext(projectId: string): Promise { @@ -73,8 +76,9 @@ async function resolveProjectContext(projectId: string): Promise const trelloApiKey = credMap.TRELLO_API_KEY; const trelloToken = credMap.TRELLO_TOKEN; const githubToken = credMap.GITHUB_TOKEN_IMPLEMENTER ?? credMap.GITHUB_TOKEN; + const pmType = project.pm?.type ?? 'trello'; - if (!trelloApiKey || !trelloToken) { + if (pmType === 'trello' && (!trelloApiKey || !trelloToken)) { console.warn( 'Warning: TRELLO_API_KEY or TRELLO_TOKEN not found — Trello operations will be skipped', ); @@ -87,11 +91,14 @@ async function resolveProjectContext(projectId: string): Promise projectId, orgId: project.orgId, repo: project.repo ?? null, - boardId: project.trello.boardId, + pmType, + boardId: project.trello?.boardId ?? '', trelloApiKey: trelloApiKey ?? '', trelloToken: trelloToken ?? '', githubToken: githubToken ?? '', webhookSecret: credMap.GITHUB_WEBHOOK_SECRET ?? undefined, + linearApiKey: credMap.LINEAR_API_KEY ?? undefined, + linearWebhookSecretSet: !!credMap.LINEAR_WEBHOOK_SECRET, }; } @@ -240,6 +247,7 @@ function printGitHubWebhooks(webhooks: GitHubWebhook[]): void { // --- Command handlers --- +// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: multi-provider webhook listing async function handleList(args: string[]): Promise { const projectId = args[1]; if (!projectId) { @@ -254,10 +262,13 @@ async function handleList(args: string[]): Promise { console.log(`Project: ${ctx.projectId} (org: ${ctx.orgId})`); console.log(`Repo: ${ctx.repo ?? 
'(none - email-only project)'}`); - console.log(`Trello board: ${ctx.boardId}`); + console.log(`PM type: ${ctx.pmType}`); + if (ctx.pmType === 'trello') { + console.log(`Trello board: ${ctx.boardId}`); + } console.log(''); - if (!githubOnly && ctx.trelloApiKey && ctx.trelloToken) { + if (!githubOnly && ctx.trelloApiKey && ctx.trelloToken && ctx.pmType === 'trello') { printTrelloWebhooks(await trelloListWebhooks(ctx)); } @@ -267,6 +278,28 @@ async function handleList(args: string[]): Promise { console.log('GitHub webhooks: (skipped - no repo configured)'); console.log(''); } + + // Linear — informational only (webhooks must be configured in Linear team settings) + if (ctx.pmType === 'linear') { + console.log('Linear webhook:'); + if (ctx.linearApiKey) { + const callbackBaseUrl = process.env.WEBHOOK_CALLBACK_BASE_URL; + if (callbackBaseUrl) { + const baseUrl = callbackBaseUrl.replace(/\/$/, ''); + console.log(` Webhook URL: ${baseUrl}/linear/webhook`); + console.log(` Webhook secret: ${ctx.linearWebhookSecretSet ? 'configured' : 'not set'}`); + } else { + console.log( + ' Webhook URL: /linear/webhook (set WEBHOOK_CALLBACK_BASE_URL to see full URL)', + ); + console.log(` Webhook secret: ${ctx.linearWebhookSecretSet ? 
'configured' : 'not set'}`); + } + console.log(' Note: Configure this URL in your Linear team settings under API > Webhooks.'); + } else { + console.log(' (LINEAR_API_KEY not configured)'); + } + console.log(''); + } } async function createTrelloWebhookIfNeeded( @@ -317,7 +350,7 @@ async function handleCreate(args: string[]): Promise { const baseUrl = callbackBaseUrl.replace(/\/$/, ''); // Trello webhook - if (!githubOnly && ctx.trelloApiKey && ctx.trelloToken) { + if (!githubOnly && ctx.trelloApiKey && ctx.trelloToken && ctx.pmType === 'trello') { await createTrelloWebhookIfNeeded(ctx, `${baseUrl}/webhook/trello`); } @@ -327,6 +360,22 @@ async function handleCreate(args: string[]): Promise { } else if (!trelloOnly && !ctx.repo) { console.log('Skipping GitHub webhook: no repo configured for this project'); } + + // Linear — display-only (cannot create programmatically) + if (ctx.pmType === 'linear' && ctx.linearApiKey) { + console.log(''); + console.log('Linear (manual setup required):'); + console.log(` Webhook URL: ${baseUrl}/linear/webhook`); + console.log(` Webhook secret: ${ctx.linearWebhookSecretSet ? 'configured' : 'not set'}`); + console.log(' Steps:'); + console.log(' 1. Go to Linear > Settings > API > Webhooks'); + console.log(' 2. Click "New webhook"'); + console.log(' 3. Set the URL to the Webhook URL above'); + console.log(' 4. Select the desired event types (e.g. Issues, Comments)'); + if (!ctx.linearWebhookSecretSet) { + console.log(' 5. 
Copy the signing secret and save it as LINEAR_WEBHOOK_SECRET credential'); + } + } } async function deleteTrelloWebhooksForUrl(ctx: ProjectContext, callbackUrl: string): Promise { From 310a6f770c044146ed4713fdc58112d15699d735 Mon Sep 17 00:00:00 2001 From: aaight Date: Wed, 15 Apr 2026 11:00:51 +0200 Subject: [PATCH 39/52] fix(dashboard): comprehensive dark mode fixes across all affected components (#1110) Co-authored-by: Cascade Bot --- tests/unit/web/chart-colors.test.ts | 68 +++++++++++++++++++ .../web/project-work-duration-chart.test.ts | 3 + tests/unit/web/work-item-duration-bar.test.ts | 5 +- web/src/components/logs/log-viewer.tsx | 2 +- .../projects/pm-wizard-common-steps.tsx | 6 +- .../projects/pm-wizard-trello-steps.tsx | 4 +- .../projects/project-general-form.tsx | 5 +- .../projects/project-work-table.tsx | 4 +- .../components/projects/projects-table.tsx | 2 +- web/src/components/projects/stats-filters.tsx | 2 +- web/src/components/projects/wizard-shared.tsx | 4 +- .../projects/work-item-duration-bar.tsx | 19 ++++-- .../runs/project-work-duration-chart.tsx | 28 ++++++-- web/src/components/runs/run-filters.tsx | 2 +- .../components/runs/work-item-cost-chart.tsx | 28 +++++--- .../runs/work-item-duration-chart.tsx | 4 +- .../settings/agent-definition-sections.tsx | 2 +- .../settings/agent-definition-shared.tsx | 2 +- .../components/settings/user-form-dialog.tsx | 2 +- web/src/components/settings/users-table.tsx | 2 +- web/src/components/ui/sonner.tsx | 6 +- web/src/lib/chart-colors.ts | 50 +++++++++----- web/src/lib/use-chart-colors.ts | 24 +++++++ web/src/routes/global/webhook-logs.tsx | 2 +- web/src/routes/prs/$projectId.$prNumber.tsx | 2 +- 25 files changed, 217 insertions(+), 61 deletions(-) create mode 100644 tests/unit/web/chart-colors.test.ts create mode 100644 web/src/lib/use-chart-colors.ts diff --git a/tests/unit/web/chart-colors.test.ts b/tests/unit/web/chart-colors.test.ts new file mode 100644 index 00000000..6779eb8d --- /dev/null +++ 
b/tests/unit/web/chart-colors.test.ts @@ -0,0 +1,68 @@ +import { describe, expect, it } from 'vitest'; + +import { + agentTypeLabel, + CHART_PALETTE_DARK, + CHART_PALETTE_LIGHT, + getAgentColor, +} from '../../../web/src/lib/chart-colors.js'; + +describe('CHART_PALETTE_LIGHT and CHART_PALETTE_DARK', () => { + it('both palettes have the same length', () => { + expect(CHART_PALETTE_LIGHT.length).toBe(CHART_PALETTE_DARK.length); + }); + + it('all light palette entries are valid hex colors', () => { + for (const color of CHART_PALETTE_LIGHT) { + expect(color).toMatch(/^#[0-9a-f]{6}$/i); + } + }); + + it('all dark palette entries are valid hex colors', () => { + for (const color of CHART_PALETTE_DARK) { + expect(color).toMatch(/^#[0-9a-f]{6}$/i); + } + }); + + it('light and dark palettes are different (not accidentally the same)', () => { + // At least one color should differ between the palettes + const allSame = CHART_PALETTE_LIGHT.every((c, i) => c === CHART_PALETTE_DARK[i]); + expect(allSame).toBe(false); + }); +}); + +describe('getAgentColor', () => { + it('uses light palette by default', () => { + expect(getAgentColor('planning')).toBe(CHART_PALETTE_LIGHT[0]); + }); + + it('uses provided palette when specified', () => { + expect(getAgentColor('planning', CHART_PALETTE_DARK)).toBe(CHART_PALETTE_DARK[0]); + }); + + it('returns consistent color for known agent types', () => { + const knownTypes = ['planning', 'implementation', 'review', 'splitting', 'debug']; + for (const agentType of knownTypes) { + const color = getAgentColor(agentType); + expect(color).toMatch(/^#[0-9a-f]{6}$/i); + } + }); + + it('returns consistent hash-based color for unknown types', () => { + const colorA = getAgentColor('unknown-agent-type'); + const colorB = getAgentColor('unknown-agent-type'); + expect(colorA).toBe(colorB); + }); + + it('returns a palette color for unknown types', () => { + const color = getAgentColor('some-custom-agent'); + expect(CHART_PALETTE_LIGHT).toContain(color); + 
}); +}); + +describe('agentTypeLabel', () => { + it('converts kebab-case to Title Case', () => { + expect(agentTypeLabel('respond-to-review')).toBe('Respond To Review'); + expect(agentTypeLabel('implementation')).toBe('Implementation'); + }); +}); diff --git a/tests/unit/web/project-work-duration-chart.test.ts b/tests/unit/web/project-work-duration-chart.test.ts index c3e04984..316c706b 100644 --- a/tests/unit/web/project-work-duration-chart.test.ts +++ b/tests/unit/web/project-work-duration-chart.test.ts @@ -22,6 +22,9 @@ vi.mock('@/lib/chart-colors.js', () => ({ agentTypeLabel: (t: string) => t, getAgentColor: () => '#000', })); +vi.mock('@/lib/use-chart-colors.js', () => ({ + useChartColors: () => () => '#000', +})); vi.mock('@/lib/utils.js', () => ({ formatDuration: (ms: number) => `${ms}ms` })); import { buildDurationChartData } from '../../../web/src/components/runs/project-work-duration-chart.js'; diff --git a/tests/unit/web/work-item-duration-bar.test.ts b/tests/unit/web/work-item-duration-bar.test.ts index 4cf4e35d..8a8b86ea 100644 --- a/tests/unit/web/work-item-duration-bar.test.ts +++ b/tests/unit/web/work-item-duration-bar.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it, vi } from 'vitest'; -// Mock chart-colors and utils (node environment, no DOM) +// Mock chart-colors, use-chart-colors, and utils (node environment, no DOM) vi.mock('@/lib/chart-colors.js', () => ({ agentTypeLabel: (t: string) => t @@ -9,6 +9,9 @@ vi.mock('@/lib/chart-colors.js', () => ({ .join(' '), getAgentColor: (t: string) => (t === 'implementation' ? '#3aada0' : '#e8642a'), })); +vi.mock('@/lib/use-chart-colors.js', () => ({ + useChartColors: () => (t: string) => (t === 'implementation' ? '#3aada0' : '#e8642a'), +})); vi.mock('@/lib/utils.js', () => ({ formatDuration: (ms: number | null | undefined) => (ms == null ? 
'-' : `${ms}ms`), })); diff --git a/web/src/components/logs/log-viewer.tsx b/web/src/components/logs/log-viewer.tsx index ad9a793c..1e14a43d 100644 --- a/web/src/components/logs/log-viewer.tsx +++ b/web/src/components/logs/log-viewer.tsx @@ -55,7 +55,7 @@ export function LogViewer({ runId }: LogViewerProps) {
{logContent ? ( -
+					
 						{logContent}
 					
) : ( diff --git a/web/src/components/projects/pm-wizard-common-steps.tsx b/web/src/components/projects/pm-wizard-common-steps.tsx index 6032faef..e87f918f 100644 --- a/web/src/components/projects/pm-wizard-common-steps.tsx +++ b/web/src/components/projects/pm-wizard-common-steps.tsx @@ -51,7 +51,11 @@ function CopyButton({ text }: { text: string }) { className="inline-flex items-center gap-1 shrink-0 rounded px-2 py-1 text-xs text-muted-foreground hover:text-foreground hover:bg-accent transition-colors" title="Copy to clipboard" > - {copied ? : } + {copied ? ( + + ) : ( + + )} {copied ? 'Copied' : 'Copy'} ); diff --git a/web/src/components/projects/pm-wizard-trello-steps.tsx b/web/src/components/projects/pm-wizard-trello-steps.tsx index 22aa072b..94ab9496 100644 --- a/web/src/components/projects/pm-wizard-trello-steps.tsx +++ b/web/src/components/projects/pm-wizard-trello-steps.tsx @@ -120,8 +120,8 @@ export function TrelloCredentialsStep({ {state.trelloToken ? (
- - Token set + + Token set
+ +

+ If you also set a Linear project scope in the Board / Project Selection + step, CASCADE applies that filter on its side after receiving each webhook — your Linear + webhook configuration stays team-scoped and unchanged. +

); } diff --git a/web/src/components/projects/pm-wizard-hooks.ts b/web/src/components/projects/pm-wizard-hooks.ts index 73353ee6..714ae643 100644 --- a/web/src/components/projects/pm-wizard-hooks.ts +++ b/web/src/components/projects/pm-wizard-hooks.ts @@ -9,11 +9,13 @@ import { API_URL } from '@/lib/api.js'; import { trpc, trpcClient } from '@/lib/trpc.js'; import { getCredentialRoles } from '../../../../src/config/integrationRoles.js'; import type { + LinearProjectOption, LinearTeamDetails, LinearTeamOption, WizardAction, WizardState, } from './pm-wizard-state.js'; +import { buildLinearIntegrationConfig } from './pm-wizard-state.js'; // ============================================================================ // Trello Discovery @@ -247,10 +249,34 @@ export function useLinearDiscovery( }, }); + const linearProjectsMutation = useMutation({ + mutationFn: (teamId: string) => { + if (state.isEditing && state.hasStoredCredentials && !state.linearApiKey) { + return trpcClient.integrationsDiscovery.linearProjectsByProject.mutate({ + projectId, + teamId, + }); + } + if (!state.linearApiKey) { + throw new Error('Enter your API key before fetching projects'); + } + return trpcClient.integrationsDiscovery.linearProjects.mutate({ + apiKey: state.linearApiKey, + teamId, + }); + }, + onSuccess: (projects) => + dispatch({ + type: 'SET_LINEAR_PROJECTS', + projects: projects as LinearProjectOption[], + }), + }); + const handleTeamSelect = (teamId: string) => { dispatch({ type: 'SET_LINEAR_TEAM_ID', id: teamId }); if (teamId) { linearDetailsMutation.mutate(teamId); + linearProjectsMutation.mutate(teamId); } }; @@ -280,10 +306,18 @@ export function useLinearDiscovery( ) { linearDetailsMutation.mutate(state.linearTeamId); } + if ( + state.linearTeamId && + state.linearProjects.length === 0 && + canFetch && + !linearProjectsMutation.isPending + ) { + linearProjectsMutation.mutate(state.linearTeamId); + } // eslint-disable-next-line react-hooks/exhaustive-deps }, [state.isEditing, 
state.linearTeamId, state.hasStoredCredentials]); - return { linearTeamsMutation, linearDetailsMutation, handleTeamSelect }; + return { linearTeamsMutation, linearDetailsMutation, linearProjectsMutation, handleTeamSelect }; } // ============================================================================ @@ -595,11 +629,7 @@ export function useSaveMutation(projectId: string, state: WizardState) { ...(state.trelloCostFieldId ? { customFields: { cost: state.trelloCostFieldId } } : {}), }; } else if (state.provider === 'linear') { - config = { - teamId: state.linearTeamId, - statuses: state.linearStatusMappings, - ...(Object.keys(state.linearLabels).length > 0 ? { labels: state.linearLabels } : {}), - }; + config = buildLinearIntegrationConfig(state); } else { config = { projectKey: state.jiraProjectKey, diff --git a/web/src/components/projects/pm-wizard-linear-steps.tsx b/web/src/components/projects/pm-wizard-linear-steps.tsx index a1165335..cad80b16 100644 --- a/web/src/components/projects/pm-wizard-linear-steps.tsx +++ b/web/src/components/projects/pm-wizard-linear-steps.tsx @@ -81,35 +81,67 @@ export function LinearCredentialsStep({ export function LinearTeamStep({ state, onTeamSelect, + dispatch, linearTeamsMutation, linearDetailsMutation, + linearProjectsMutation, }: { state: WizardState; onTeamSelect: (id: string) => void; + dispatch: React.Dispatch; linearTeamsMutation: UseMutationResult; linearDetailsMutation: UseMutationResult; + linearProjectsMutation: UseMutationResult; }) { return ( -
- - ({ - label: t.name, - value: t.id, - detail: t.key, - }))} - value={state.linearTeamId} - onChange={onTeamSelect} - placeholder="Select a Linear team..." - isLoading={linearTeamsMutation.isPending} - error={linearTeamsMutation.isError ? (linearTeamsMutation.error as Error).message : null} - onRetry={() => - (linearTeamsMutation as UseMutationResult).mutate() - } - /> - {state.linearTeamId && linearDetailsMutation.isPending && ( -
- Loading team details... +
+
+ + ({ + label: t.name, + value: t.id, + detail: t.key, + }))} + value={state.linearTeamId} + onChange={onTeamSelect} + placeholder="Select a Linear team..." + isLoading={linearTeamsMutation.isPending} + error={linearTeamsMutation.isError ? (linearTeamsMutation.error as Error).message : null} + onRetry={() => + (linearTeamsMutation as UseMutationResult).mutate() + } + /> + {state.linearTeamId && linearDetailsMutation.isPending && ( +
+ Loading team details... +
+ )} +
+ + {state.linearTeamId && ( +
+ + ({ + label: p.name, + value: p.id, + }))} + value={state.linearProjectId} + onChange={(v) => dispatch({ type: 'SET_LINEAR_PROJECT_ID', value: v })} + placeholder="No project scope — all team issues" + isLoading={linearProjectsMutation.isPending} + error={ + linearProjectsMutation.isError + ? (linearProjectsMutation.error as Error).message + : null + } + onRetry={() => linearProjectsMutation.mutate(state.linearTeamId)} + /> +

+ Optional — leave empty to process all issues in this team. When set, CASCADE only + responds to issues that belong to this Linear Project. +

)}
diff --git a/web/src/components/projects/pm-wizard-state.ts b/web/src/components/projects/pm-wizard-state.ts index 0214728e..791501bb 100644 --- a/web/src/components/projects/pm-wizard-state.ts +++ b/web/src/components/projects/pm-wizard-state.ts @@ -37,6 +37,13 @@ export interface LinearTeamOption { key: string; } +export interface LinearProjectOption { + id: string; + name: string; + icon: string | null; + color: string | null; +} + export interface LinearTeamDetails { states: Array<{ id: string; name: string; type: string }>; labels: Array<{ id: string; name: string; color: string }>; @@ -62,6 +69,8 @@ export interface WizardState { jiraProjects: JiraProjectOption[]; linearTeamId: string; linearTeams: LinearTeamOption[]; + linearProjectId: string; + linearProjects: LinearProjectOption[]; // Step 4: Field mapping trelloBoardDetails: TrelloBoardDetails | null; jiraProjectDetails: JiraProjectDetails | null; @@ -109,6 +118,8 @@ export type WizardAction = | { type: 'SET_LINEAR_TEAMS'; teams: LinearTeamOption[] } | { type: 'SET_LINEAR_TEAM_ID'; id: string } | { type: 'SET_LINEAR_TEAM_DETAILS'; details: LinearTeamDetails | null } + | { type: 'SET_LINEAR_PROJECTS'; projects: LinearProjectOption[] } + | { type: 'SET_LINEAR_PROJECT_ID'; value: string } | { type: 'SET_TRELLO_BOARD_DETAILS'; details: TrelloBoardDetails | null } | { type: 'SET_JIRA_PROJECT_DETAILS'; details: JiraProjectDetails | null } | { type: 'SET_TRELLO_LIST_MAPPING'; key: string; value: string } @@ -165,6 +176,8 @@ export function createInitialState(): WizardState { jiraProjects: [], linearTeamId: '', linearTeams: [], + linearProjectId: '', + linearProjects: [], trelloBoardDetails: null, jiraProjectDetails: null, linearTeamDetails: null, @@ -266,7 +279,16 @@ export const wizardReducer: Reducer = (state, action) linearTeamId: action.id, linearTeamDetails: null, linearStatusMappings: {}, + // A new team invalidates the project list and any chosen project — + // Linear projects are team-scoped, so the 
previous selection is + // not guaranteed to belong to the new team. + linearProjectId: '', + linearProjects: [], }; + case 'SET_LINEAR_PROJECTS': + return { ...state, linearProjects: action.projects }; + case 'SET_LINEAR_PROJECT_ID': + return { ...state, linearProjectId: action.value }; case 'SET_LINEAR_TEAM_DETAILS': return { ...state, linearTeamDetails: action.details }; case 'SET_TRELLO_BOARD_DETAILS': @@ -405,6 +427,7 @@ export function buildEditState( configuredKeys.has('JIRA_EMAIL') && configuredKeys.has('JIRA_API_TOKEN'); } else if (provider === 'linear') { editState.linearTeamId = (initialConfig.teamId as string) ?? ''; + editState.linearProjectId = (initialConfig.projectId as string) ?? ''; const statuses = initialConfig.statuses as Record | undefined; if (statuses) editState.linearStatusMappings = statuses; @@ -456,6 +479,19 @@ export function areCredentialsReady(state: WizardState): boolean { return !!state.linearApiKey; } +/** + * Build the Linear integration config payload from wizard state. + * Pure function so it can be unit-tested without the React runtime. + */ +export function buildLinearIntegrationConfig(state: WizardState): Record { + return { + teamId: state.linearTeamId, + ...(state.linearProjectId ? { projectId: state.linearProjectId } : {}), + statuses: state.linearStatusMappings, + ...(Object.keys(state.linearLabels).length > 0 ? { labels: state.linearLabels } : {}), + }; +} + /** * Map the provider's webhook listing into the shape expected by `WebhookStep`. 
* Linear webhooks are configured manually outside the wizard; Trello/JIRA come diff --git a/web/src/components/projects/pm-wizard.tsx b/web/src/components/projects/pm-wizard.tsx index 12cd4c26..986da19d 100644 --- a/web/src/components/projects/pm-wizard.tsx +++ b/web/src/components/projects/pm-wizard.tsx @@ -145,12 +145,8 @@ export function PMWizard({ advanceToStep, projectId, ); - const { linearTeamsMutation, linearDetailsMutation, handleTeamSelect } = useLinearDiscovery( - state, - dispatch, - advanceToStep, - projectId, - ); + const { linearTeamsMutation, linearDetailsMutation, linearProjectsMutation, handleTeamSelect } = + useLinearDiscovery(state, dispatch, advanceToStep, projectId); const { createLabelMutation, createMissingLabelsMutation } = useTrelloLabelCreation( state, dispatch, @@ -331,8 +327,10 @@ export function PMWizard({ ) : ( Date: Wed, 15 Apr 2026 21:46:02 +0000 Subject: [PATCH 48/52] fix(linear): store state IDs, not names, in status mappings MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Linear webhooks deliver workflow-state UUIDs in data.stateId, but the PM wizard's status-mapping dropdown was saving display names as the option value. The status-changed trigger does strict-equality matching on the saved value, so every Linear status transition since the integration landed has been silently no-oping. JIRA gets away with names because JIRA webhooks deliver names; Linear does not. Changed the option value to s.id (s.name remains as the label so the dropdown still reads naturally). Added a regression test asserting every state ID appears as an option value and no state name does, plus a WHY comment to anchor the contract for future maintainers. Operator action required after deploy: re-run the Status Mappings wizard step on existing Linear-backed projects. Each saved name will no longer match a value, so dropdowns will appear empty — re-pick from the dropdown and save. New mappings persist as UUIDs. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- .../web/linear-field-mapping-step.test.ts | 32 ++++++++++++++++--- .../projects/pm-wizard-linear-steps.tsx | 5 ++- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/tests/unit/web/linear-field-mapping-step.test.ts b/tests/unit/web/linear-field-mapping-step.test.ts index 75afbc25..b5d98d66 100644 --- a/tests/unit/web/linear-field-mapping-step.test.ts +++ b/tests/unit/web/linear-field-mapping-step.test.ts @@ -80,12 +80,36 @@ describe('LinearFieldMappingStep — status slots', () => { it('reflects persisted mappings on initial render', () => { const html = render({ linearStatusMappings: { - splitting: 'Splitting', - planning: 'Planning', + splitting: 'st-sp', + planning: 'st-pl', }, }); // The persisted values should appear as selected option values. - expect(html).toContain('value="Splitting"'); - expect(html).toContain('value="Planning"'); + expect(html).toContain('value="st-sp"'); + expect(html).toContain('value="st-pl"'); + }); + + // Regression: Linear webhooks deliver workflow-state UUIDs in `data.stateId`, + // not display names. Storing names in the mapping makes the trigger handler's + // strict equality check (src/triggers/linear/status-changed.ts) silently no-op. + it('uses state IDs (not names) as dropdown option values', () => { + const html = render(); + // Each Linear workflow state's ID must appear as an option value. + for (const id of ['st-bl', 'st-sp', 'st-pl', 'st-td', 'st-ip', 'st-ir', 'st-dn', 'st-mg']) { + expect(html, `option value="${id}" missing`).toContain(`value="${id}"`); + } + // State names must NOT appear as option values (they may still be option labels). 
+ for (const name of [ + 'Backlog', + 'Splitting', + 'Planning', + 'Todo', + 'In Progress', + 'In Review', + 'Done', + 'Merged', + ]) { + expect(html, `state name "${name}" must not be a value`).not.toContain(`value="${name}"`); + } }); }); diff --git a/web/src/components/projects/pm-wizard-linear-steps.tsx b/web/src/components/projects/pm-wizard-linear-steps.tsx index cad80b16..4729d23f 100644 --- a/web/src/components/projects/pm-wizard-linear-steps.tsx +++ b/web/src/components/projects/pm-wizard-linear-steps.tsx @@ -172,10 +172,13 @@ export function LinearFieldMappingStep({ ({ label: s.name, - value: s.name, + value: s.id, })) ?? [] } value={state.linearStatusMappings[slot] ?? ''} From 00fcca75b47bf98bffb641508d5141a38521f481 Mon Sep 17 00:00:00 2001 From: Zbigniew Sobiecki Date: Wed, 15 Apr 2026 21:58:19 +0000 Subject: [PATCH 49/52] fix(linear): pass projectId to extractProjectIdFromJob for linear jobs Linear jobs were not handled by extractProjectIdFromJob, so the router returned null and skipped credential resolution. Workers spawned with no credentials, then fell back to the DB and died with "Credential is encrypted but CREDENTIAL_MASTER_KEY is not set" because workers intentionally don't receive the master key. Linear jobs already carry projectId on their payload (set by LinearRouterAdapter.buildJob), so the fix is a single branch in the projectId extractor. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- src/router/worker-env.ts | 2 +- tests/unit/router/container-manager.test.ts | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/router/worker-env.ts b/src/router/worker-env.ts index d7b7f055..182bea74 100644 --- a/src/router/worker-env.ts +++ b/src/router/worker-env.ts @@ -23,7 +23,7 @@ export async function extractProjectIdFromJob(data: CascadeJob): Promise { expect(await extractProjectIdFromJob(job)).toBe('proj-jira'); }); + it('returns projectId for linear jobs', async () => { + const job = { type: 'linear', projectId: 'proj-linear' } as unknown as CascadeJob; + expect(await extractProjectIdFromJob(job)).toBe('proj-linear'); + }); + it('returns projectId resolved from repo for github jobs', async () => { const job = { type: 'github', repoFullName: 'owner/repo' } as CascadeJob; mockFindProjectByRepo.mockResolvedValue({ id: 'proj-gh' } as never); From 814092964c036e2f41b6be46a9f0348aa642bf04 Mon Sep 17 00:00:00 2001 From: Zbigniew Sobiecki Date: Wed, 15 Apr 2026 22:13:49 +0000 Subject: [PATCH 50/52] fix(linear): send personal API keys without Bearer prefix MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Linear personal API keys (lin_api_*) are sent bare in the Authorization header — the `Bearer` prefix is OAuth-only, and using it with personal keys triggers HTTP 400. The router's two Linear API call sites (platformClients/linear.ts, bot-identity-resolvers.ts) had diverged from the canonical client (src/linear/client.ts) and used the OAuth pattern, breaking acknowledgment-comment posting and silently disabling the Linear bot-identity self-loop check. Also: - Fix the misleading docblock at src/linear/client.ts:8 that documented `Bearer ` while the code correctly used a bare key — future maintainers would have copied the doc and reintroduced the bug. 
- Improve linearGraphQL error messages in both the canonical and the router-side helpers to include the response body. Without it the failure surface was just an HTTP status, which made this very bug invisible until source-comparison. Test coverage: - Asserts each of postComment / deleteComment / updateComment sends bare API key, no Bearer prefix. - Asserts the GraphQL mutation body and variables. - Asserts the warning on HTTP failure includes the response body so diagnostics aren't lost again. Out of scope (separate follow-up): linearLabels are stored as free-text names but Linear's issueUpdate.labelIds requires UUIDs, so the `cascade-processing` label isn't being applied. Same shape as the status-mapping bug — needs a wizard UX change to fetch + present a label dropdown. Co-Authored-By: Claude Opus 4.6 (1M context) --- src/linear/client.ts | 6 +- src/router/bot-identity-resolvers.ts | 3 +- src/router/platformClients/linear.ts | 7 +- tests/unit/router/platformClients.test.ts | 126 ++++++++++++++++++++++ 4 files changed, 137 insertions(+), 5 deletions(-) diff --git a/src/linear/client.ts b/src/linear/client.ts index e95366fe..144d53d4 100644 --- a/src/linear/client.ts +++ b/src/linear/client.ts @@ -5,7 +5,8 @@ * are scoped per-request via withLinearCredentials(). * * API endpoint: https://api.linear.app/graphql - * Auth: Authorization: Bearer + * Auth: Authorization: (personal API keys are sent bare; `Bearer` + * is OAuth-only and triggers HTTP 400 with personal keys.) 
*/ import { AsyncLocalStorage } from 'node:async_hooks'; @@ -68,7 +69,8 @@ async function linearGraphQL(query: string, variables?: Record ''); + throw new Error(`Linear API HTTP error ${response.status}: ${body}`); } const json = (await response.json()) as GraphQLResponse; diff --git a/src/router/bot-identity-resolvers.ts b/src/router/bot-identity-resolvers.ts index 68f04cd2..bc425e2b 100644 --- a/src/router/bot-identity-resolvers.ts +++ b/src/router/bot-identity-resolvers.ts @@ -95,7 +95,8 @@ export async function resolveLinearBotUserId(projectId: string): Promise ''); + throw new Error(`Linear API HTTP error ${response.status}: ${body}`); } const json = (await response.json()) as { diff --git a/tests/unit/router/platformClients.test.ts b/tests/unit/router/platformClients.test.ts index ecaad36b..d37ac107 100644 --- a/tests/unit/router/platformClients.test.ts +++ b/tests/unit/router/platformClients.test.ts @@ -31,6 +31,7 @@ vi.mock('../../../src/utils/logging.js', () => ({ import { findProjectById, getIntegrationCredential } from '../../../src/config/provider.js'; import { + LinearPlatformClient, resolveGitHubHeaders, resolveJiraCredentials, resolveTrelloCredentials, @@ -54,6 +55,27 @@ const MOCK_CREDENTIALS: Record = { 'pm/api_token': 'jira-api-token', }; +const LINEAR_API_KEY = 'lin_api_test123'; + +function mockLinearApiKey() { + mockGetIntegrationCredential.mockImplementation(async (_projectId, category, _provider, role) => { + if (category === 'pm' && role === 'api_key') return LINEAR_API_KEY; + throw new Error(`Credential '${category}/${role}' not found`); + }); +} + +function lastFetchAuth(): unknown { + const call = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + const init = call?.[1] as { headers?: Record } | undefined; + return init?.headers?.Authorization; +} + +function lastFetchBody(): { query?: string; variables?: unknown } { + const call = mockFetch.mock.calls[mockFetch.mock.calls.length - 1]; + const init = call?.[1] as { body?: string } | 
undefined; + return init?.body ? JSON.parse(init.body) : {}; +} + const MOCK_PROJECT_WITH_JIRA = { id: 'proj1', name: 'Test', @@ -300,3 +322,107 @@ describe('TrelloPlatformClient', () => { }); }); }); + +// --------------------------------------------------------------------------- +// LinearPlatformClient +// --------------------------------------------------------------------------- + +describe('LinearPlatformClient', () => { + beforeEach(() => { + mockLinearApiKey(); + }); + + describe('postComment', () => { + it('sends bare API key (no Bearer prefix) — Linear personal API keys are not OAuth tokens', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => ({ + data: { commentCreate: { success: true, comment: { id: 'c-new' } } }, + }), + }); + + const client = new LinearPlatformClient('proj1'); + const id = await client.postComment('issue-uuid-1', 'hello'); + + expect(id).toBe('c-new'); + expect(lastFetchAuth()).toBe(LINEAR_API_KEY); + expect(lastFetchAuth()).not.toMatch(/^Bearer\s/); + }); + + it('posts the commentCreate mutation with issueId and body variables', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => ({ + data: { commentCreate: { success: true, comment: { id: 'c-1' } } }, + }), + }); + + const client = new LinearPlatformClient('proj1'); + await client.postComment('issue-uuid-2', 'Processing this issue'); + + const body = lastFetchBody(); + expect(body.query).toContain('commentCreate'); + expect(body.variables).toEqual({ + issueId: 'issue-uuid-2', + body: 'Processing this issue', + }); + }); + + it('logs the response body when Linear returns an HTTP error so the failure is diagnosable', async () => { + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 400, + text: async () => '{"error":"bad token"}', + }); + + const client = new LinearPlatformClient('proj1'); + const id = await client.postComment('issue-uuid-3', 'msg'); + + expect(id).toBeNull(); + 
expect(mockLogger.warn).toHaveBeenCalledWith( + expect.stringContaining('Failed to post Linear comment'), + expect.stringContaining('bad token'), + ); + }); + + it('returns null without calling fetch when credentials are missing', async () => { + mockGetIntegrationCredential.mockRejectedValue(new Error('not found')); + + const client = new LinearPlatformClient('proj1'); + const id = await client.postComment('issue-uuid-4', 'msg'); + + expect(id).toBeNull(); + expect(mockFetch).not.toHaveBeenCalled(); + }); + }); + + describe('deleteComment', () => { + it('sends bare API key for delete', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => ({ data: { commentDelete: { success: true } } }), + }); + + const client = new LinearPlatformClient('proj1'); + await client.deleteComment('issue-uuid-1', 'comment-abc'); + + expect(lastFetchAuth()).toBe(LINEAR_API_KEY); + expect(lastFetchAuth()).not.toMatch(/^Bearer\s/); + }); + }); + + describe('updateComment', () => { + it('sends bare API key for update', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => ({ data: { commentUpdate: { success: true } } }), + }); + + const client = new LinearPlatformClient('proj1'); + await client.updateComment('comment-abc', 'edited'); + + expect(lastFetchAuth()).toBe(LINEAR_API_KEY); + expect(lastFetchAuth()).not.toMatch(/^Bearer\s/); + }); + }); +}); From 078ac9c3e8d8afb3e6b7d814547d8393fb438748 Mon Sep 17 00:00:00 2001 From: Zbigniew Sobiecki Date: Wed, 15 Apr 2026 22:15:55 +0000 Subject: [PATCH 51/52] test(linear): add text() to makeHttpErrorResponse factory Required because linearGraphQL now reads response.text() to include the body in HTTP-error messages. The factory was incomplete (only mocked ok/status/json) and would have failed for any real Response.text() call. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- tests/unit/pm/linear/client.test.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/pm/linear/client.test.ts b/tests/unit/pm/linear/client.test.ts index d4b9a420..264c0e9d 100644 --- a/tests/unit/pm/linear/client.test.ts +++ b/tests/unit/pm/linear/client.test.ts @@ -25,11 +25,12 @@ function makeGraphQLErrorResponse(message: string) { }; } -function makeHttpErrorResponse(status: number) { +function makeHttpErrorResponse(status: number, body = '') { return { ok: false, status, json: vi.fn().mockResolvedValue({}), + text: vi.fn().mockResolvedValue(body), }; } From edb54d777e08467b137d8f125291ead49a7dbe0e Mon Sep 17 00:00:00 2001 From: Zbigniew Sobiecki Date: Thu, 16 Apr 2026 06:46:33 +0000 Subject: [PATCH 52/52] =?UTF-8?q?feat(linear):=20full=20label=20parity=20w?= =?UTF-8?q?ith=20Trello=20=E2=80=94=20dropdown=20+=20create-label=20UX?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The Linear PM wizard stored label mappings as free-text names, but Linear's issueUpdate.labelIds rejects names and requires UUIDs — so the cascade-processing / cascade-processed / cascade-error / cascade-ready / cascade-auto labels were silently never applied to issues. Brings Linear to the same UX as Trello: Backend - linearClient.createLabel(teamId, name, color?) wraps the issueLabelCreate GraphQL mutation, returning the new label's { id, name, color }. - integrationsDiscovery.createLinearLabel + createLinearLabels tRPC mutations mirror the Trello equivalents (single + batch with per-label error reporting). Wizard - LinearFieldMappingStep replaces the free-text label inputs with a dropdown sourced from linearTeamDetails.labels (already fetched by discovery). Saves label UUIDs, not names. - Per-slot "Create" button and a batch "Create All Missing" button use LINEAR_LABEL_DEFAULTS (name + hex color per slot). 
- useLinearLabelCreation hook mirrors useTrelloLabelCreation (single + batch mutation, ADD_LINEAR_TEAM_LABEL dispatch on success). Adapter hardening - LinearPMProvider.resolveLabelId validates the resolved value is UUID- shaped. If the mapping is missing or holds a name (from a pre-existing configuration), addLabel/removeLabel short-circuit with a diagnostic warn instead of passing a bad value to Linear and failing opaquely. - createWorkItem filters out non-UUID label values (same warn). Operator note: projects configured before this PR have names stored (e.g. processing: "cascade-processing"). Re-open the PM wizard → Label Mappings step; dropdowns will show empty because the saved name no longer matches a dropdown value. Pick from the dropdown — or click "Create All Missing" to provision defaults in one click. Co-Authored-By: Claude Opus 4.6 (1M context) --- src/api/routers/integrationsDiscovery.ts | 70 +++++++++ src/linear/client.ts | 33 +++++ src/pm/linear/adapter.ts | 38 ++++- tests/unit/pm/linear/adapter.test.ts | 29 +++- tests/unit/pm/linear/client.test.ts | 105 ++++++++++++++ .../web/linear-field-mapping-step.test.ts | 73 ++++++++++ tests/unit/web/pm-wizard-state.test.ts | 8 +- .../components/projects/pm-wizard-hooks.ts | 62 ++++++++ .../projects/pm-wizard-linear-steps.tsx | 134 +++++++++++++++--- .../components/projects/pm-wizard-state.ts | 24 +++- web/src/components/projects/pm-wizard.tsx | 42 +++++- 11 files changed, 574 insertions(+), 44 deletions(-) diff --git a/src/api/routers/integrationsDiscovery.ts b/src/api/routers/integrationsDiscovery.ts index ce2ef823..1da2266c 100644 --- a/src/api/routers/integrationsDiscovery.ts +++ b/src/api/routers/integrationsDiscovery.ts @@ -711,4 +711,74 @@ export const integrationsDiscoveryRouter = router({ withLinearCredentials({ apiKey }, () => linearClient.getTeamProjects(input.teamId)), ); }), + + createLinearLabel: protectedProcedure + .input( + linearCredsInput.extend({ + teamId: z.string().min(1), + name: 
z.string().min(1).max(100), + color: z.string().optional(), + }), + ) + .mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.createLinearLabel called', { + orgId: ctx.effectiveOrgId, + teamId: input.teamId, + name: input.name, + }); + return withLinearCreds(input, 'Failed to create Linear label', (creds) => + withLinearCredentials(creds, () => + linearClient.createLabel(input.teamId, input.name, input.color), + ), + ); + }), + + createLinearLabels: protectedProcedure + .input( + linearCredsInput.extend({ + teamId: z.string().min(1), + labels: z + .array( + z.object({ + name: z.string().min(1).max(100), + color: z.string().optional(), + }), + ) + .min(1) + .max(10), + }), + ) + .mutation(async ({ ctx, input }) => { + logger.debug('integrationsDiscovery.createLinearLabels called', { + orgId: ctx.effectiveOrgId, + teamId: input.teamId, + count: input.labels.length, + }); + const creds = { apiKey: input.apiKey }; + + const results = await Promise.allSettled( + input.labels.map((label) => + withLinearCredentials(creds, () => + linearClient.createLabel(input.teamId, label.name, label.color), + ), + ), + ); + + const successes: Array<{ id: string; name: string; color: string }> = []; + const errors: Array<{ name: string; error: string }> = []; + + for (let i = 0; i < results.length; i++) { + const result = results[i]; + if (result.status === 'fulfilled') { + successes.push(result.value); + } else { + errors.push({ + name: input.labels[i].name, + error: result.reason instanceof Error ? 
result.reason.message : String(result.reason), + }); + } + } + + return { successes, errors }; + }), }); diff --git a/src/linear/client.ts b/src/linear/client.ts index 144d53d4..3d798b14 100644 --- a/src/linear/client.ts +++ b/src/linear/client.ts @@ -460,6 +460,39 @@ export const linearClient = { return linearClient.updateIssue(issueId, { labelIds: updatedLabelIds }); }, + async createLabel( + teamId: string, + name: string, + color?: string, + ): Promise<{ id: string; name: string; color: string }> { + logger.debug('Creating Linear issue label', { teamId, name, color }); + const input: { teamId: string; name: string; color?: string } = { teamId, name }; + if (color) input.color = color; + const data = await linearGraphQL<{ + issueLabelCreate: { + success: boolean; + issueLabel: { id: string; name: string; color: string } | null; + }; + }>( + `mutation CreateIssueLabel($input: IssueLabelCreateInput!) { + issueLabelCreate(input: $input) { + success + issueLabel { + id + name + color + } + } + }`, + { input }, + ); + if (!data.issueLabelCreate.success || !data.issueLabelCreate.issueLabel) { + throw new Error('Linear issueLabelCreate returned success=false'); + } + const label = data.issueLabelCreate.issueLabel; + return { id: label.id, name: label.name, color: label.color }; + }, + // ===== Attachments ===== async getAttachments(issueId: string): Promise { diff --git a/src/pm/linear/adapter.ts b/src/pm/linear/adapter.ts index c271edfc..9b65baff 100644 --- a/src/pm/linear/adapter.ts +++ b/src/pm/linear/adapter.ts @@ -22,11 +22,36 @@ import type { WorkItemLabel, } from '../types.js'; +const UUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i; + export class LinearPMProvider implements PMProvider { readonly type = 'linear' as const; constructor(private config: LinearConfig) {} + /** + * Resolve a label slot name or raw ID to a Linear label UUID. 
+ * + * Linear's GraphQL API requires UUIDs for issueUpdate.labelIds and + * issueLabelCreate lookups. Returning a non-UUID string would silently + * fail server-side, so we short-circuit misconfigurations here with a + * diagnostic. Returns null when the input cannot be resolved to a UUID. + */ + private resolveLabelId(slotOrId: string): string | null { + const mapped = (this.config.labels as Record | undefined)?.[slotOrId]; + const candidate = mapped ?? slotOrId; + if (UUID_PATTERN.test(candidate)) return candidate; + logger.warn( + '[Linear] Label value is not a UUID — skipping (check PM wizard → Label Mappings)', + { + input: slotOrId, + resolved: mapped ?? '', + teamId: this.config.teamId, + }, + ); + return null; + } + async getWorkItem(id: string): Promise { const issue = await linearClient.getIssue(id); return { @@ -88,8 +113,8 @@ export class LinearPMProvider implements PMProvider { ...(config.labels?.length ? { labelIds: config.labels - .map((name) => (this.config.labels as Record | undefined)?.[name]) - .filter((id): id is string => !!id), + .map((name) => this.resolveLabelId(name)) + .filter((id): id is string => id !== null), } : {}), }); @@ -152,15 +177,14 @@ export class LinearPMProvider implements PMProvider { } async addLabel(id: string, labelIdOrName: string): Promise { - // Resolve name → ID via config if possible - const labelId = - (this.config.labels as Record | undefined)?.[labelIdOrName] ?? labelIdOrName; + const labelId = this.resolveLabelId(labelIdOrName); + if (!labelId) return; await linearClient.addLabel(id, labelId); } async removeLabel(id: string, labelIdOrName: string): Promise { - const labelId = - (this.config.labels as Record | undefined)?.[labelIdOrName] ?? 
labelIdOrName; + const labelId = this.resolveLabelId(labelIdOrName); + if (!labelId) return; await linearClient.removeLabel(id, labelId); } diff --git a/tests/unit/pm/linear/adapter.test.ts b/tests/unit/pm/linear/adapter.test.ts index 552d2af2..0c8239db 100644 --- a/tests/unit/pm/linear/adapter.test.ts +++ b/tests/unit/pm/linear/adapter.test.ts @@ -53,7 +53,7 @@ const defaultConfig = { cancelled: 'state-cancelled', }, labels: { - processing: 'label-processing-id', + processing: '11111111-1111-4111-8111-111111111111', }, }; @@ -302,15 +302,29 @@ describe('LinearPMProvider', () => { await provider.addLabel('issue-uuid', 'processing'); - expect(mockAddLabel).toHaveBeenCalledWith('issue-uuid', 'label-processing-id'); + expect(mockAddLabel).toHaveBeenCalledWith( + 'issue-uuid', + '11111111-1111-4111-8111-111111111111', + ); }); - it('passes label ID directly when not in config', async () => { + it('passes a UUID-shaped value through when not in config', async () => { mockAddLabel.mockResolvedValue(makeIssue()); - await provider.addLabel('issue-uuid', 'raw-label-id'); + await provider.addLabel('issue-uuid', '550e8400-e29b-41d4-a716-446655440000'); - expect(mockAddLabel).toHaveBeenCalledWith('issue-uuid', 'raw-label-id'); + expect(mockAddLabel).toHaveBeenCalledWith( + 'issue-uuid', + '550e8400-e29b-41d4-a716-446655440000', + ); + }); + + it('skips the API call and warns when the value is neither a mapped slot nor a UUID', async () => { + // Linear API rejects non-UUID labelIds; rather than silently fail we + // short-circuit and emit a diagnostic so the misconfiguration is visible. 
+ await provider.addLabel('issue-uuid', 'unmapped-slot'); + + expect(mockAddLabel).not.toHaveBeenCalled(); }); }); @@ -320,7 +334,10 @@ describe('LinearPMProvider', () => { await provider.removeLabel('issue-uuid', 'processing'); - expect(mockRemoveLabel).toHaveBeenCalledWith('issue-uuid', 'label-processing-id'); + expect(mockRemoveLabel).toHaveBeenCalledWith( + 'issue-uuid', + '11111111-1111-4111-8111-111111111111', + ); }); }); diff --git a/tests/unit/pm/linear/client.test.ts b/tests/unit/pm/linear/client.test.ts index 264c0e9d..a6c2608b 100644 --- a/tests/unit/pm/linear/client.test.ts +++ b/tests/unit/pm/linear/client.test.ts @@ -394,4 +394,109 @@ describe('linearClient discovery methods', () => { ); }); }); + + // ========================================================================= + // createLabel + // ========================================================================= + describe('createLabel', () => { + it('returns the created label with id, name, and color', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + issueLabelCreate: { + success: true, + issueLabel: { + id: 'new-label-uuid', + name: 'cascade-processing', + color: '#0F7938', + }, + }, + }), + ); + + const result = await withLinearCredentials(TEST_CREDS, () => + linearClient.createLabel('team-1', 'cascade-processing', '#0F7938'), + ); + + expect(result).toEqual({ + id: 'new-label-uuid', + name: 'cascade-processing', + color: '#0F7938', + }); + }); + + it('omits color when not provided (Linear auto-assigns)', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + issueLabelCreate: { + success: true, + issueLabel: { id: 'l1', name: 'cascade-auto', color: '#555' }, + }, + }), + ); + + await withLinearCredentials(TEST_CREDS, () => + linearClient.createLabel('team-1', 'cascade-auto'), + ); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body as string); + expect(body.variables.input.teamId).toBe('team-1'); + 
expect(body.variables.input.name).toBe('cascade-auto'); + expect(body.variables.input).not.toHaveProperty('color'); + }); + + it('passes teamId, name, and color in the input variable', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + issueLabelCreate: { + success: true, + issueLabel: { id: 'l1', name: 'cascade-error', color: '#E11D48' }, + }, + }), + ); + + await withLinearCredentials(TEST_CREDS, () => + linearClient.createLabel('team-xyz', 'cascade-error', '#E11D48'), + ); + + const body = JSON.parse(mockFetch.mock.calls[0][1].body as string); + expect(body.query).toContain('issueLabelCreate'); + expect(body.variables.input).toEqual({ + teamId: 'team-xyz', + name: 'cascade-error', + color: '#E11D48', + }); + }); + + it('throws when issueLabelCreate returns success: false', async () => { + mockFetch.mockResolvedValue( + makeGraphQLResponse({ + issueLabelCreate: { success: false, issueLabel: null }, + }), + ); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.createLabel('team-1', 'x')), + ).rejects.toThrow('Linear issueLabelCreate returned success=false'); + }); + + it('throws on GraphQL errors (e.g. 
duplicate label name)', async () => { + mockFetch.mockResolvedValue( + makeGraphQLErrorResponse('Label with name "cascade-processing" already exists'), + ); + + await expect( + withLinearCredentials(TEST_CREDS, () => + linearClient.createLabel('team-1', 'cascade-processing'), + ), + ).rejects.toThrow(/already exists/); + }); + + it('throws on HTTP errors', async () => { + mockFetch.mockResolvedValue(makeHttpErrorResponse(401, 'bad token')); + + await expect( + withLinearCredentials(TEST_CREDS, () => linearClient.createLabel('team-1', 'x')), + ).rejects.toThrow('Linear API HTTP error 401'); + }); + }); }); diff --git a/tests/unit/web/linear-field-mapping-step.test.ts b/tests/unit/web/linear-field-mapping-step.test.ts index b5d98d66..4159e9f4 100644 --- a/tests/unit/web/linear-field-mapping-step.test.ts +++ b/tests/unit/web/linear-field-mapping-step.test.ts @@ -113,3 +113,76 @@ describe('LinearFieldMappingStep — status slots', () => { } }); }); + +describe('LinearFieldMappingStep — label slots', () => { + function renderWithLabels( + labels: Array<{ id: string; name: string; color: string }>, + persisted: Record = {}, + onCreateLabel?: (slot: string) => void, + onCreateAllMissingLabels?: () => void, + ): string { + const state = makeState({ + linearTeamDetails: { + states: [], + labels, + }, + linearLabels: persisted, + }); + return renderToStaticMarkup( + createElement(LinearFieldMappingStep, { + state, + dispatch: () => {}, + onCreateLabel, + onCreateAllMissingLabels, + }), + ); + } + + it('renders label dropdowns sourced from linearTeamDetails.labels (ID-backed options)', () => { + const html = renderWithLabels([ + { id: 'lbl-proc-uuid', name: 'cascade-processing', color: '#2563EB' }, + { id: 'lbl-done-uuid', name: 'cascade-processed', color: '#16A34A' }, + ]); + // The label dropdown must expose each Linear label's UUID as an option value. 
+ expect(html).toContain('value="lbl-proc-uuid"'); + expect(html).toContain('value="lbl-done-uuid"'); + // Display names should NOT appear as option values (they can still be in the label text). + expect(html).not.toContain('value="cascade-processing"'); + }); + + it('shows the "Create" affordance for slots with no mapping and no existing matching label', () => { + const html = renderWithLabels( + [], + {}, + () => {}, + () => {}, + ); + // A dedicated create button per slot — look for the batch button text too. + expect(html).toMatch(/Create All Missing/); + }); + + it('hides the per-slot Create button when the default label already exists on the team', () => { + const html = renderWithLabels( + [ + { id: 'lbl-ready', name: 'cascade-ready', color: '#0284C7' }, + { id: 'lbl-proc', name: 'cascade-processing', color: '#2563EB' }, + { id: 'lbl-procd', name: 'cascade-processed', color: '#16A34A' }, + { id: 'lbl-err', name: 'cascade-error', color: '#DC2626' }, + { id: 'lbl-auto', name: 'cascade-auto', color: '#9333EA' }, + ], + {}, + () => {}, + () => {}, + ); + // With every default present, there's nothing left to create → batch button hidden. 
+ expect(html).not.toMatch(/Create All Missing/); + }); + + it('reflects persisted label mappings as selected dropdown values', () => { + const html = renderWithLabels( + [{ id: 'lbl-proc-uuid', name: 'cascade-processing', color: '#2563EB' }], + { processing: 'lbl-proc-uuid' }, + ); + expect(html).toContain('value="lbl-proc-uuid"'); + }); +}); diff --git a/tests/unit/web/pm-wizard-state.test.ts b/tests/unit/web/pm-wizard-state.test.ts index 8101e054..f7feeb17 100644 --- a/tests/unit/web/pm-wizard-state.test.ts +++ b/tests/unit/web/pm-wizard-state.test.ts @@ -837,9 +837,13 @@ describe('buildLinearIntegrationConfig — save payload', () => { const bare = buildLinearIntegrationConfig(seed()); expect(bare).not.toHaveProperty('labels'); const withLabels = buildLinearIntegrationConfig( - seed({ linearLabels: { processing: 'cascade-processing' } }), + // Linear labels are stored as UUIDs (the Linear API rejects names for + // issueUpdate.labelIds). Wizard dropdowns populate from the team's labels. 
+ seed({ linearLabels: { processing: '11111111-1111-4111-8111-111111111111' } }), ); - expect(withLabels).toHaveProperty('labels', { processing: 'cascade-processing' }); + expect(withLabels).toHaveProperty('labels', { + processing: '11111111-1111-4111-8111-111111111111', + }); }); }); diff --git a/web/src/components/projects/pm-wizard-hooks.ts b/web/src/components/projects/pm-wizard-hooks.ts index 714ae643..21ee4b81 100644 --- a/web/src/components/projects/pm-wizard-hooks.ts +++ b/web/src/components/projects/pm-wizard-hooks.ts @@ -735,3 +735,65 @@ export function useSaveMutation(projectId: string, state: WizardState) { return { saveMutation }; } + +// ============================================================================ +// Linear Label Creation +// ============================================================================ + +export function useLinearLabelCreation(state: WizardState, dispatch: React.Dispatch) { + const createLabelMutation = useMutation({ + mutationFn: (vars: { name: string; color?: string; slot: string }) => { + if (!state.linearApiKey || !state.linearTeamId) { + throw new Error('Missing credentials or team selection'); + } + return trpcClient.integrationsDiscovery.createLinearLabel.mutate({ + apiKey: state.linearApiKey, + teamId: state.linearTeamId, + name: vars.name, + color: vars.color, + }); + }, + onSuccess: (label, vars) => { + dispatch({ type: 'ADD_LINEAR_TEAM_LABEL', label }); + dispatch({ type: 'SET_LINEAR_LABEL', key: vars.slot, value: label.id }); + }, + onError: (error) => { + console.error('Failed to create Linear label:', error); + alert(`Failed to create label: ${error instanceof Error ? 
error.message : String(error)}`); + }, + }); + + const createMissingLabelsMutation = useMutation({ + mutationFn: (labelsToCreate: Array<{ slot: string; name: string; color?: string }>) => { + if (!state.linearApiKey || !state.linearTeamId) { + throw new Error('Missing credentials or team selection'); + } + return trpcClient.integrationsDiscovery.createLinearLabels.mutate({ + apiKey: state.linearApiKey, + teamId: state.linearTeamId, + labels: labelsToCreate.map(({ name, color }) => ({ name, color })), + }); + }, + onSuccess: (result, labelsToCreate) => { + for (const label of result.successes) { + const slot = labelsToCreate.find((l) => l.name === label.name)?.slot; + if (slot) { + dispatch({ type: 'ADD_LINEAR_TEAM_LABEL', label }); + dispatch({ type: 'SET_LINEAR_LABEL', key: slot, value: label.id }); + } + } + if (result.errors.length > 0) { + const errorMsg = result.errors.map((e) => `${e.name}: ${e.error}`).join('\n'); + alert( + `Some labels failed to create:\n${errorMsg}\n\n${result.successes.length} label(s) created successfully.`, + ); + } + }, + onError: (error) => { + console.error('Failed to create Linear labels:', error); + alert(`Failed to create labels: ${error instanceof Error ? 
error.message : String(error)}`); + }, + }); + + return { createLabelMutation, createMissingLabelsMutation }; +} diff --git a/web/src/components/projects/pm-wizard-linear-steps.tsx b/web/src/components/projects/pm-wizard-linear-steps.tsx index 4729d23f..fc96e6c3 100644 --- a/web/src/components/projects/pm-wizard-linear-steps.tsx +++ b/web/src/components/projects/pm-wizard-linear-steps.tsx @@ -3,7 +3,8 @@ */ import type { UseMutationResult } from '@tanstack/react-query'; -import { CheckCircle2, Loader2 } from 'lucide-react'; +import { CheckCircle2, Loader2, Plus } from 'lucide-react'; +import { Button } from '@/components/ui/button.js'; import { Input } from '@/components/ui/input.js'; import { Label } from '@/components/ui/label.js'; import type { WizardAction, WizardState } from './pm-wizard-state.js'; @@ -26,6 +27,19 @@ const LINEAR_STATUS_SLOTS = [ const LINEAR_LABEL_SLOTS = ['processing', 'processed', 'error', 'readyToProcess', 'auto']; +/** + * Default CASCADE label names + hex colors used when the operator clicks + * "Create" on an unmapped slot. Linear expects hex color strings on + * issueLabelCreate; picked to roughly match the Trello named-color palette. 
+ */ +export const LINEAR_LABEL_DEFAULTS: Record = { + readyToProcess: { name: 'cascade-ready', color: '#0284C7' }, + processing: { name: 'cascade-processing', color: '#2563EB' }, + processed: { name: 'cascade-processed', color: '#16A34A' }, + error: { name: 'cascade-error', color: '#DC2626' }, + auto: { name: 'cascade-auto', color: '#9333EA' }, +}; + // ============================================================================ // LinearCredentialsStep // ============================================================================ @@ -155,10 +169,26 @@ export function LinearTeamStep({ export function LinearFieldMappingStep({ state, dispatch, + onCreateLabel, + onCreateAllMissingLabels, + creatingSlot, }: { state: WizardState; dispatch: React.Dispatch; + onCreateLabel?: (slot: string) => void; + onCreateAllMissingLabels?: () => void; + creatingSlot?: string | null; }) { + const existingLabelNames = new Set( + (state.linearTeamDetails?.labels ?? []).map((l) => l.name.toLowerCase()), + ); + + const missingSlots = LINEAR_LABEL_SLOTS.filter((slot) => { + if (state.linearLabels[slot]) return false; + const defaultName = LINEAR_LABEL_DEFAULTS[slot]?.name ?? ''; + return !existingLabelNames.has(defaultName.toLowerCase()); + }); + return (
{/* Status mappings */} @@ -199,29 +229,91 @@ export function LinearFieldMappingStep({ )}
- {/* Labels */} + {/* Label mappings */}
- +
+ + {state.linearTeamDetails && missingSlots.length > 0 && onCreateAllMissingLabels && ( + + )} +

- CASCADE label names used in Linear. These are created automatically by CASCADE. + Map each CASCADE label to a Linear label on the team. Click "Create" to add missing ones.

- {LINEAR_LABEL_SLOTS.map((slot) => ( -
- {slot} - - dispatch({ - type: 'SET_LINEAR_LABEL', - key: slot, - value: e.target.value, - }) - } - placeholder={`Linear label for ${slot}`} - className="flex-1" - /> -
- ))} + {state.linearTeamDetails ? ( + LINEAR_LABEL_SLOTS.map((slot) => { + const isMapped = !!state.linearLabels[slot]; + const defaultInfo = LINEAR_LABEL_DEFAULTS[slot]; + const alreadyExists = + defaultInfo && existingLabelNames.has(defaultInfo.name.toLowerCase()); + const showCreateButton = !isMapped && !alreadyExists && onCreateLabel && defaultInfo; + + return ( +
+
+ l.name) + .map((l) => ({ + label: `${l.name} (${l.color})`, + value: l.id, + })) ?? [] + } + value={state.linearLabels[slot] ?? ''} + onChange={(v) => + dispatch({ + type: 'SET_LINEAR_LABEL', + key: slot, + value: v, + }) + } + manualFallback + /> +
+ {showCreateButton && ( + + )} +
+ ); + }) + ) : ( +

+ Select a team first to populate label options. +

+ )}
); diff --git a/web/src/components/projects/pm-wizard-state.ts b/web/src/components/projects/pm-wizard-state.ts index 791501bb..99685951 100644 --- a/web/src/components/projects/pm-wizard-state.ts +++ b/web/src/components/projects/pm-wizard-state.ts @@ -133,6 +133,7 @@ export type WizardAction = | { type: 'SET_LINEAR_LABEL'; key: string; value: string } | { type: 'INIT_EDIT'; state: Partial } | { type: 'ADD_TRELLO_BOARD_LABEL'; label: { id: string; name: string; color: string } } + | { type: 'ADD_LINEAR_TEAM_LABEL'; label: { id: string; name: string; color: string } } | { type: 'ADD_TRELLO_BOARD_CUSTOM_FIELD'; customField: { id: string; name: string; type: string }; @@ -151,13 +152,13 @@ export const INITIAL_JIRA_LABELS: Record = { auto: 'cascade-auto', }; -export const INITIAL_LINEAR_LABELS: Record = { - processing: 'cascade-processing', - processed: 'cascade-processed', - error: 'cascade-error', - readyToProcess: 'cascade-ready', - auto: 'cascade-auto', -}; +/** + * Linear label mappings store workflow-label **UUIDs**, not names, because + * Linear's GraphQL API rejects names for issueUpdate.labelIds. The wizard + * populates these from the team's existing labels or via the create-label + * button. Initial state is therefore empty — operators pick or create. 
+ */ +export const INITIAL_LINEAR_LABELS: Record = {}; export function createInitialState(): WizardState { return { @@ -349,6 +350,15 @@ export const wizardReducer: Reducer = (state, action) labels: [...state.trelloBoardDetails.labels, action.label], }, }; + case 'ADD_LINEAR_TEAM_LABEL': + if (!state.linearTeamDetails) return state; + return { + ...state, + linearTeamDetails: { + ...state.linearTeamDetails, + labels: [...state.linearTeamDetails.labels, action.label], + }, + }; case 'ADD_TRELLO_BOARD_CUSTOM_FIELD': if (!state.trelloBoardDetails) return state; return { diff --git a/web/src/components/projects/pm-wizard.tsx b/web/src/components/projects/pm-wizard.tsx index 986da19d..2988f139 100644 --- a/web/src/components/projects/pm-wizard.tsx +++ b/web/src/components/projects/pm-wizard.tsx @@ -8,6 +8,7 @@ import { useJiraCustomFieldCreation, useJiraDiscovery, useLinearDiscovery, + useLinearLabelCreation, useLinearWebhookInfo, useSaveMutation, useTrelloCustomFieldCreation, @@ -22,6 +23,7 @@ import { JiraProjectStep, } from './pm-wizard-jira-steps.js'; import { + LINEAR_LABEL_DEFAULTS, LinearCredentialsStep, LinearFieldMappingStep, LinearTeamStep, @@ -151,6 +153,10 @@ export function PMWizard({ state, dispatch, ); + const { + createLabelMutation: createLinearLabelMutation, + createMissingLabelsMutation: createMissingLinearLabelsMutation, + } = useLinearLabelCreation(state, dispatch); const { createCustomFieldMutation } = useTrelloCustomFieldCreation(state, dispatch); const { createJiraCustomFieldMutation } = useJiraCustomFieldCreation(state, dispatch); const webhookManagement = useWebhookManagement(projectId, state); @@ -207,6 +213,34 @@ export function PMWizard({ } }; + const handleCreateLinearLabel = (slot: string) => { + const defaults = LINEAR_LABEL_DEFAULTS[slot]; + if (!defaults) return; + setCreatingSlot(slot); + createLinearLabelMutation.mutate( + { name: defaults.name, color: defaults.color, slot }, + { onSettled: () => setCreatingSlot(null) }, + ); + }; + + 
const handleCreateAllMissingLinearLabels = () => { + const existingLabelNames = new Set( + (state.linearTeamDetails?.labels ?? []).map((l) => l.name.toLowerCase()), + ); + const labelsToCreate = Object.entries(LINEAR_LABEL_DEFAULTS) + .filter(([slot, { name }]) => { + if (state.linearLabels[slot]) return false; + return !existingLabelNames.has(name.toLowerCase()); + }) + .map(([slot, { name, color }]) => ({ slot, name, color })); + if (labelsToCreate.length > 0) { + setCreatingSlot('__batch__'); + createMissingLinearLabelsMutation.mutate(labelsToCreate, { + onSettled: () => setCreatingSlot(null), + }); + } + }; + // ---- Step status ---- const credsReady = areCredentialsReady(state); @@ -361,7 +395,13 @@ export function PMWizard({ creatingCostField={creatingCostField} /> ) : state.provider === 'linear' ? ( - + ) : (