diff --git a/apps/server/src/audit/Services/AuditLogService.ts b/apps/server/src/audit/Services/AuditLogService.ts new file mode 100644 index 0000000000..f949e6c888 --- /dev/null +++ b/apps/server/src/audit/Services/AuditLogService.ts @@ -0,0 +1,177 @@ +/** + * AuditLogService - Service interface and Live layer for structured audit logging. + * + * Records security-relevant and operational events with actor, category, + * and severity metadata. Supports paginated queries and live streaming. + * + * @module AuditLogService + */ +import type { + AuditEntry, + AuditQueryInput, + AuditQueryResult, + AuditStreamEvent, +} from "@t3tools/contracts"; +import { Effect, Layer, PubSub, ServiceMap, Stream } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export interface AuditLogServiceShape { + /** + * Record a structured audit entry. + * + * Persists to the audit_log table and publishes to the live event stream. + */ + readonly record: (entry: { + readonly actor: AuditEntry["actor"]; + readonly actorId: string | null; + readonly category: AuditEntry["category"]; + readonly action: string; + readonly severity: AuditEntry["severity"]; + readonly projectId: string | null; + readonly threadId: string | null; + readonly commandId: string | null; + readonly eventId: string | null; + readonly summary: string; + readonly detail: string | null; + readonly metadata: Record<string, unknown>; + }) => Effect.Effect<AuditEntry>; + + /** + * Query audit entries with filters and pagination. + */ + readonly query: (input: AuditQueryInput) => Effect.Effect<AuditQueryResult>; + + /** + * Live stream of new audit entries. + * + * Each access creates a fresh PubSub subscription so multiple consumers + * independently receive all events. 
+ */ + readonly streamEvents: Stream.Stream<AuditStreamEvent>; +} + +export class AuditLogService extends ServiceMap.Service<AuditLogService, AuditLogServiceShape>()( + "t3/audit/Services/AuditLogService", +) {} + +const makeAuditLogService = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + const pubsub = yield* PubSub.unbounded<AuditStreamEvent>(); + + const record: AuditLogServiceShape["record"] = (input) => + Effect.gen(function* () { + const id = crypto.randomUUID(); + const now = new Date().toISOString(); + const metadataJson = JSON.stringify(input.metadata); + + yield* sql`INSERT INTO audit_log (id, timestamp, actor, actor_id, category, action, severity, project_id, thread_id, command_id, event_id, summary, detail, metadata) + VALUES (${id}, ${now}, ${input.actor}, ${input.actorId}, ${input.category}, ${input.action}, ${input.severity}, ${input.projectId}, ${input.threadId}, ${input.commandId}, ${input.eventId}, ${input.summary}, ${input.detail}, ${metadataJson})`; + + const entry: AuditEntry = { + id: id as AuditEntry["id"], + timestamp: now, + actor: input.actor, + actorId: (input.actorId ?? null) as AuditEntry["actorId"], + category: input.category, + action: input.action as AuditEntry["action"], + severity: input.severity, + projectId: (input.projectId ?? null) as AuditEntry["projectId"], + threadId: (input.threadId ?? null) as AuditEntry["threadId"], + commandId: (input.commandId ?? null) as AuditEntry["commandId"], + eventId: (input.eventId ?? null) as AuditEntry["eventId"], + summary: input.summary as AuditEntry["summary"], + detail: (input.detail ?? 
null) as AuditEntry["detail"], + metadata: input.metadata, + }; + + yield* PubSub.publish(pubsub, { type: "audit.entry" as const, entry }); + return entry; + }).pipe(Effect.orDie); + + const query: AuditLogServiceShape["query"] = (input) => + Effect.gen(function* () { + const conditions: Array<string> = []; + const params: Array<string> = []; + + if (input.projectId) { + conditions.push("project_id = ?"); + params.push(input.projectId); + } + if (input.threadId) { + conditions.push("thread_id = ?"); + params.push(input.threadId); + } + if (input.category) { + conditions.push("category = ?"); + params.push(input.category); + } + if (input.severity) { + conditions.push("severity = ?"); + params.push(input.severity); + } + if (input.actor) { + conditions.push("actor = ?"); + params.push(input.actor); + } + if (input.fromTimestamp) { + conditions.push("timestamp >= ?"); + params.push(input.fromTimestamp); + } + if (input.toTimestamp) { + conditions.push("timestamp <= ?"); + params.push(input.toTimestamp); + } + + const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : ""; + const limit = Math.max(1, input.limit); + const offset = Math.max(0, input.offset); + + const countResult = yield* sql.unsafe<{ total: number }>( + `SELECT COUNT(*) as total FROM audit_log ${whereClause}`, + params, + ); + const total = Number(countResult[0]?.total ?? 0); + + const rows = yield* sql.unsafe<Record<string, unknown>>( + `SELECT id, timestamp, actor, actor_id, category, action, severity, project_id, thread_id, command_id, event_id, summary, detail, metadata + FROM audit_log ${whereClause} + ORDER BY timestamp DESC + LIMIT ? OFFSET ?`, + [...params, limit, offset], + ); + + const entries: AuditEntry[] = rows.map((r) => ({ + id: r["id"] as AuditEntry["id"], + timestamp: r["timestamp"] as string, + actor: r["actor"] as AuditEntry["actor"], + actorId: (r["actor_id"] ?? 
null) as AuditEntry["actorId"], + category: r["category"] as AuditEntry["category"], + action: r["action"] as AuditEntry["action"], + severity: r["severity"] as AuditEntry["severity"], + projectId: (r["project_id"] ?? null) as AuditEntry["projectId"], + threadId: (r["thread_id"] ?? null) as AuditEntry["threadId"], + commandId: (r["command_id"] ?? null) as AuditEntry["commandId"], + eventId: (r["event_id"] ?? null) as AuditEntry["eventId"], + summary: r["summary"] as AuditEntry["summary"], + detail: (r["detail"] ?? null) as AuditEntry["detail"], + metadata: + typeof r["metadata"] === "string" ? JSON.parse(r["metadata"]) : (r["metadata"] ?? {}), + })); + + return { + entries, + total: total as AuditQueryResult["total"], + hasMore: offset + limit < total, + } satisfies AuditQueryResult; + }).pipe(Effect.orDie); + + return { + record, + query, + get streamEvents(): AuditLogServiceShape["streamEvents"] { + return Stream.fromPubSub(pubsub); + }, + } satisfies AuditLogServiceShape; +}); + +export const AuditLogServiceLive = Layer.effect(AuditLogService, makeAuditLogService); diff --git a/apps/server/src/ci/Services/CIIntegrationService.ts b/apps/server/src/ci/Services/CIIntegrationService.ts new file mode 100644 index 0000000000..09631041f8 --- /dev/null +++ b/apps/server/src/ci/Services/CIIntegrationService.ts @@ -0,0 +1,168 @@ +/** + * CIIntegrationService - Service interface for CI pipeline integration. + * + * Tracks CI runs, manages feedback policies for automated responses to + * failures, and exposes a live event stream for CI status updates. 
+ * + * @module CIIntegrationService + */ +import type { + CIFeedbackPolicy, + CIGetStatusInput, + CIGetStatusResult, + CIRun, + CIRunId, + CISetFeedbackPolicyInput, + CIStreamEvent, + CITriggerRerunInput, +} from "@t3tools/contracts"; +import { Effect, Layer, PubSub, ServiceMap, Stream } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export interface CIIntegrationServiceShape { + /** + * Query CI run status with project/thread/branch filters. + */ + readonly getStatus: (input: CIGetStatusInput) => Effect.Effect<CIGetStatusResult>; + + /** + * Record or update a CI run entry. + */ + readonly recordRun: (run: CIRun) => Effect.Effect<CIRun>; + + /** + * Trigger a re-run of a CI pipeline. Records an audit-style activity + * event and returns the run being re-triggered. + */ + readonly triggerRerun: (input: CITriggerRerunInput) => Effect.Effect<CIRun>; + + /** + * Create or update the feedback policy for a project. + */ + readonly setFeedbackPolicy: (input: CISetFeedbackPolicyInput) => Effect.Effect<CIFeedbackPolicy>; + + /** + * Retrieve the feedback policy for a project, if one exists. + */ + readonly getFeedbackPolicy: (projectId: string) => Effect.Effect<CIFeedbackPolicy | null>; + + /** + * Live stream of CI events (run updates, feedback triggers). + * + * Each access creates a fresh PubSub subscription so multiple consumers + * independently receive all events. + */ + readonly streamEvents: Stream.Stream<CIStreamEvent>; +} + +export class CIIntegrationService extends ServiceMap.Service< + CIIntegrationService, + CIIntegrationServiceShape +>()("t3/ci/Services/CIIntegrationService") {} + +const makeCIIntegrationService = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + const pubsub = yield* PubSub.unbounded<CIStreamEvent>(); + + const runFromRow = (r: Record<string, unknown>): CIRun => ({ + id: r["id"] as CIRunId, + projectId: r["project_id"] as CIRun["projectId"], + threadId: (r["thread_id"] ?? null) as CIRun["threadId"], + turnId: (r["turn_id"] ?? 
null) as CIRun["turnId"], + provider: r["provider"] as CIRun["provider"], + workflowName: r["workflow_name"] as CIRun["workflowName"], + branch: r["branch"] as CIRun["branch"], + commitSha: r["commit_sha"] as CIRun["commitSha"], + status: r["status"] as CIRun["status"], + conclusion: (r["conclusion"] ?? null) as CIRun["conclusion"], + jobs: JSON.parse(r["jobs"] as string), + htmlUrl: (r["html_url"] ?? null) as CIRun["htmlUrl"], + startedAt: r["started_at"] as string, + completedAt: (r["completed_at"] ?? null) as CIRun["completedAt"], + updatedAt: r["updated_at"] as string, + }); + + const getStatus: CIIntegrationServiceShape["getStatus"] = (input) => + Effect.gen(function* () { + const rows = yield* sql.unsafe<Record<string, unknown>>( + `SELECT * FROM ci_runs WHERE project_id = ?${input.threadId ? " AND thread_id = ?" : ""}${input.branch ? " AND branch = ?" : ""} ORDER BY started_at DESC LIMIT ?`, + [ + input.projectId, + ...(input.threadId ? [input.threadId] : []), + ...(input.branch ? [input.branch] : []), + input.limit, + ], + ); + return { runs: rows.map(runFromRow), hasMore: rows.length === input.limit } as const; + }).pipe(Effect.orDie); + + const recordRun: CIIntegrationServiceShape["recordRun"] = (run) => + Effect.gen(function* () { + yield* sql`INSERT OR REPLACE INTO ci_runs (id, project_id, thread_id, turn_id, provider, workflow_name, branch, commit_sha, status, conclusion, jobs, html_url, started_at, completed_at, updated_at) + VALUES (${run.id}, ${run.projectId}, ${run.threadId}, ${run.turnId}, ${run.provider}, ${run.workflowName}, ${run.branch}, ${run.commitSha}, ${run.status}, ${run.conclusion}, ${JSON.stringify(run.jobs)}, ${run.htmlUrl}, ${run.startedAt}, ${run.completedAt}, ${run.updatedAt})`; + yield* PubSub.publish(pubsub, { type: "ci.run.updated" as const, run }); + return run; + }).pipe(Effect.orDie); + + const triggerRerun: CIIntegrationServiceShape["triggerRerun"] = (input) => + Effect.gen(function* () { + const rows = yield* sql< + Record<string, unknown> + >`SELECT * FROM ci_runs 
WHERE id = ${input.runId}`; + const run = rows[0]; + if (!run) return yield* Effect.fail(new Error(`CI run ${input.runId} not found`)); + const now = new Date().toISOString(); + const requeued = { + ...runFromRow(run), + status: "queued" as const, + conclusion: null, + updatedAt: now, + }; + yield* recordRun(requeued); + return requeued; + }).pipe(Effect.orDie); + + const setFeedbackPolicy: CIIntegrationServiceShape["setFeedbackPolicy"] = (input) => + Effect.gen(function* () { + yield* sql`INSERT OR REPLACE INTO ci_feedback_policies (project_id, on_failure, auto_fix_max_attempts, watch_branches) + VALUES (${input.projectId}, ${input.onFailure}, ${input.autoFixMaxAttempts}, ${JSON.stringify(input.watchBranches)})`; + return { + projectId: input.projectId, + onFailure: input.onFailure, + autoFixMaxAttempts: input.autoFixMaxAttempts, + watchBranches: input.watchBranches, + } satisfies CIFeedbackPolicy; + }).pipe(Effect.orDie); + + const getFeedbackPolicy: CIIntegrationServiceShape["getFeedbackPolicy"] = (projectId) => + Effect.gen(function* () { + const rows = yield* sql<{ + project_id: string; + on_failure: string; + auto_fix_max_attempts: number; + watch_branches: string; + }>`SELECT * FROM ci_feedback_policies WHERE project_id = ${projectId}`; + const row = rows[0]; + if (!row) return null; + return { + projectId: row.project_id as CIFeedbackPolicy["projectId"], + onFailure: row.on_failure as CIFeedbackPolicy["onFailure"], + autoFixMaxAttempts: row.auto_fix_max_attempts, + watchBranches: JSON.parse(row.watch_branches), + } satisfies CIFeedbackPolicy; + }).pipe(Effect.orDie); + + return { + getStatus, + recordRun, + triggerRerun, + setFeedbackPolicy, + getFeedbackPolicy, + streamEvents: Stream.fromPubSub(pubsub), + }; +}); + +export const CIIntegrationServiceLive = Layer.effect( + CIIntegrationService, + makeCIIntegrationService, +); diff --git a/apps/server/src/cost/Services/CostTrackingService.ts b/apps/server/src/cost/Services/CostTrackingService.ts new file mode 
100644 index 0000000000..ee4a6c18b3 --- /dev/null +++ b/apps/server/src/cost/Services/CostTrackingService.ts @@ -0,0 +1,263 @@ +/** + * CostTrackingService - Service interface and Live layer for token usage cost tracking. + * + * Records per-turn token usage and cost, manages budgets with alerting, + * and provides summaries aggregated by provider, thread, or project. + * + * @module CostTrackingService + */ +import type { + CostAlert, + CostBudget, + CostEntry, + CostGetSummaryInput, + CostSetBudgetInput, + CostStreamEvent, + CostSummary, + ProviderKind, + TokenUsage, +} from "@t3tools/contracts"; +import { Effect, Layer, PubSub, ServiceMap, Stream } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export interface CostTrackingServiceShape { + /** + * Record token usage and cost for a single interaction. + * + * Automatically updates any matching budget spend and publishes alerts + * when budget thresholds are reached. + */ + readonly recordUsage: (input: { + readonly threadId: string; + readonly projectId: string; + readonly turnId: string | null; + readonly provider: string; + readonly model: string; + readonly usage: TokenUsage; + readonly costCents: number; + }) => Effect.Effect<CostEntry>; + + /** + * Retrieve an aggregated cost summary for a given time period and filters. + */ + readonly getSummary: (input: CostGetSummaryInput) => Effect.Effect<CostSummary>; + + /** + * Create or update a cost budget with alerting threshold. + */ + readonly setBudget: (input: CostSetBudgetInput) => Effect.Effect<CostBudget>; + + /** + * List all budgets, optionally filtered by project. + */ + readonly getBudgets: (input: { + readonly projectId?: string | undefined; + }) => Effect.Effect<{ readonly budgets: ReadonlyArray<CostBudget> }>; + + /** + * Live stream of cost events (new entries, alerts, budget updates). + * + * Each access creates a fresh PubSub subscription so multiple consumers + * independently receive all events. 
+ */ + readonly streamEvents: Stream.Stream<CostStreamEvent>; +} + +export class CostTrackingService extends ServiceMap.Service< + CostTrackingService, + CostTrackingServiceShape +>()("t3/cost/Services/CostTrackingService") {} + +type BudgetRow = { + id: string; + project_id: string | null; + limit_cents: number; + period_days: number; + current_spend_cents: number; + alert_threshold_percent: number; + enabled: number; + created_at: string; + updated_at: string; +}; + +const makeCostTrackingService = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + const pubsub = yield* PubSub.unbounded<CostStreamEvent>(); + + const recordUsage: CostTrackingServiceShape["recordUsage"] = (input) => + Effect.gen(function* () { + const id = crypto.randomUUID(); + const now = new Date().toISOString(); + + yield* sql`INSERT INTO cost_entries (id, thread_id, project_id, turn_id, provider, model, input_tokens, output_tokens, cache_read_tokens, cache_write_tokens, thinking_tokens, cost_cents, created_at) + VALUES (${id}, ${input.threadId}, ${input.projectId}, ${input.turnId}, ${input.provider}, ${input.model}, ${input.usage.inputTokens}, ${input.usage.outputTokens}, ${input.usage.cacheReadTokens}, ${input.usage.cacheWriteTokens}, ${input.usage.thinkingTokens}, ${input.costCents}, ${now})`; + + const entry: CostEntry = { + id: id as CostEntry["id"], + threadId: input.threadId as CostEntry["threadId"], + projectId: input.projectId as CostEntry["projectId"], + turnId: (input.turnId ?? 
null) as CostEntry["turnId"], + provider: input.provider as CostEntry["provider"], + model: input.model as CostEntry["model"], + usage: input.usage, + costCents: input.costCents as CostEntry["costCents"], + createdAt: now, + }; + + // Update budget spend and check alert thresholds + const budgets = yield* sql<{ + id: string; + limit_cents: number; + current_spend_cents: number; + alert_threshold_percent: number; + project_id: string | null; + }>`SELECT id, limit_cents, current_spend_cents, alert_threshold_percent, project_id + FROM cost_budgets + WHERE enabled = 1 AND (project_id IS NULL OR project_id = ${input.projectId})`; + + for (const budget of budgets) { + const newSpend = budget.current_spend_cents + input.costCents; + yield* sql`UPDATE cost_budgets SET current_spend_cents = ${newSpend}, updated_at = ${now} WHERE id = ${budget.id}`; + + const percentUsed = Math.round((newSpend / budget.limit_cents) * 100); + if (percentUsed >= budget.alert_threshold_percent) { + const alert: CostAlert = { + budgetId: budget.id as CostAlert["budgetId"], + projectId: (budget.project_id ?? null) as CostAlert["projectId"], + currentSpendCents: newSpend as CostAlert["currentSpendCents"], + limitCents: budget.limit_cents as CostAlert["limitCents"], + percentUsed: percentUsed as CostAlert["percentUsed"], + alertedAt: now, + }; + yield* PubSub.publish(pubsub, { type: "cost.alert" as const, alert }); + } + } + + yield* PubSub.publish(pubsub, { type: "cost.entry" as const, entry }); + return entry; + }).pipe(Effect.orDie); + + const getSummary: CostTrackingServiceShape["getSummary"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + const periodStart = + input.periodStart ?? new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString(); + const periodEnd = input.periodEnd ?? 
now; + + const conditions: Array<string> = ["created_at >= ?", "created_at <= ?"]; + const params: Array<string> = [periodStart, periodEnd]; + if (input.projectId) { + conditions.push("project_id = ?"); + params.push(input.projectId); + } + if (input.threadId) { + conditions.push("thread_id = ?"); + params.push(input.threadId); + } + const whereClause = `WHERE ${conditions.join(" AND ")}`; + + const totals = yield* sql.unsafe<{ + total_cost: number; + total_input: number; + total_output: number; + total_thinking: number; + }>( + `SELECT COALESCE(SUM(cost_cents), 0) as total_cost, COALESCE(SUM(input_tokens), 0) as total_input, COALESCE(SUM(output_tokens), 0) as total_output, COALESCE(SUM(thinking_tokens), 0) as total_thinking FROM cost_entries ${whereClause}`, + params, + ); + const byProvider = yield* sql.unsafe<{ + provider: string; + cost_cents: number; + input_tokens: number; + output_tokens: number; + }>( + `SELECT provider, COALESCE(SUM(cost_cents), 0) as cost_cents, COALESCE(SUM(input_tokens), 0) as input_tokens, COALESCE(SUM(output_tokens), 0) as output_tokens FROM cost_entries ${whereClause} GROUP BY provider`, + params, + ); + const byThread = yield* sql.unsafe<{ threadId: string; cost_cents: number }>( + `SELECT thread_id as threadId, COALESCE(SUM(cost_cents), 0) as cost_cents FROM cost_entries ${whereClause} GROUP BY thread_id ORDER BY cost_cents DESC LIMIT 20`, + params, + ); + + const row = totals[0] ?? 
{ + total_cost: 0, + total_input: 0, + total_output: 0, + total_thinking: 0, + }; + return { + totalCostCents: Number(row.total_cost), + totalInputTokens: Number(row.total_input), + totalOutputTokens: Number(row.total_output), + totalThinkingTokens: Number(row.total_thinking), + byProvider: byProvider.map((r) => ({ + provider: r.provider as ProviderKind, + costCents: Number(r.cost_cents), + inputTokens: Number(r.input_tokens), + outputTokens: Number(r.output_tokens), + })), + byThread: byThread.map((r) => ({ + threadId: r.threadId as CostSummary["byThread"][number]["threadId"], + costCents: Number(r.cost_cents) as CostSummary["byThread"][number]["costCents"], + })), + periodStart, + periodEnd, + } satisfies CostSummary; + }).pipe(Effect.orDie); + + const setBudget: CostTrackingServiceShape["setBudget"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + + yield* sql`INSERT OR REPLACE INTO cost_budgets (id, project_id, limit_cents, period_days, current_spend_cents, alert_threshold_percent, enabled, created_at, updated_at) + VALUES (${input.budgetId}, ${input.projectId}, ${input.limitCents}, ${input.periodDays}, 0, ${input.alertThresholdPercent}, ${input.enabled ? 1 : 0}, ${now}, ${now})`; + + const budget: CostBudget = { + id: input.budgetId as CostBudget["id"], + projectId: (input.projectId ?? 
null) as CostBudget["projectId"], + limitCents: input.limitCents as CostBudget["limitCents"], + periodDays: input.periodDays as CostBudget["periodDays"], + currentSpendCents: 0 as CostBudget["currentSpendCents"], + alertThresholdPercent: input.alertThresholdPercent as CostBudget["alertThresholdPercent"], + enabled: input.enabled, + createdAt: now, + updatedAt: now, + }; + + yield* PubSub.publish(pubsub, { type: "cost.budget.updated" as const, budget }); + return budget; + }).pipe(Effect.orDie); + + const getBudgets: CostTrackingServiceShape["getBudgets"] = (input) => + Effect.gen(function* () { + const rows: readonly BudgetRow[] = input.projectId + ? yield* sql`SELECT * FROM cost_budgets WHERE project_id = ${input.projectId} OR project_id IS NULL` + : yield* sql`SELECT * FROM cost_budgets`; + + return { + budgets: rows.map((r) => ({ + id: r.id as CostBudget["id"], + projectId: (r.project_id ?? null) as CostBudget["projectId"], + limitCents: r.limit_cents as CostBudget["limitCents"], + periodDays: r.period_days as CostBudget["periodDays"], + currentSpendCents: r.current_spend_cents as CostBudget["currentSpendCents"], + alertThresholdPercent: r.alert_threshold_percent as CostBudget["alertThresholdPercent"], + enabled: r.enabled === 1, + createdAt: r.created_at, + updatedAt: r.updated_at, + })) as readonly CostBudget[], + }; + }).pipe(Effect.orDie); + + return { + recordUsage, + getSummary, + setBudget, + getBudgets, + get streamEvents(): CostTrackingServiceShape["streamEvents"] { + return Stream.fromPubSub(pubsub); + }, + } satisfies CostTrackingServiceShape; +}); + +export const CostTrackingServiceLive = Layer.effect(CostTrackingService, makeCostTrackingService); diff --git a/apps/server/src/memory/Services/ProjectMemoryService.ts b/apps/server/src/memory/Services/ProjectMemoryService.ts new file mode 100644 index 0000000000..7463e0deae --- /dev/null +++ b/apps/server/src/memory/Services/ProjectMemoryService.ts @@ -0,0 +1,188 @@ +import type { + MemoryAddInput, + 
MemoryEntry, + MemoryEntryId, + MemoryForgetInput, + MemoryIndexInput, + MemoryIndexResult, + MemoryListInput, + MemoryListResult, + MemorySearchInput, + MemorySearchOutput, + MemorySearchResult, +} from "@t3tools/contracts"; +import { Effect, Layer, ServiceMap } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export interface ProjectMemoryServiceShape { + readonly add: (input: MemoryAddInput) => Effect.Effect<MemoryEntry>; + readonly search: (input: MemorySearchInput) => Effect.Effect<MemorySearchOutput>; + readonly forget: (input: MemoryForgetInput) => Effect.Effect<void>; + readonly list: (input: MemoryListInput) => Effect.Effect<MemoryListResult>; + readonly index: (input: MemoryIndexInput) => Effect.Effect<MemoryIndexResult>; +} + +export class ProjectMemoryService extends ServiceMap.Service< + ProjectMemoryService, + ProjectMemoryServiceShape +>()("t3/memory/Services/ProjectMemoryService") {} + +const makeProjectMemoryService = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + const add: ProjectMemoryServiceShape["add"] = (input) => + Effect.gen(function* () { + const id = crypto.randomUUID() as MemoryEntryId; + const now = new Date().toISOString(); + const tagsJson = JSON.stringify(input.tags); + yield* sql`INSERT INTO memory_entries (id, project_id, thread_id, kind, title, content, tags, relevance_score, access_count, created_at, updated_at, expires_at) + VALUES (${id}, ${input.projectId}, ${input.threadId ?? null}, ${input.kind}, ${input.title}, ${input.content}, ${tagsJson}, 0.5, 0, ${now}, ${now}, ${input.expiresAt ?? null})`; + return { + id, + projectId: input.projectId, + threadId: (input.threadId ?? null) as MemoryEntry["threadId"], + kind: input.kind, + title: input.title, + content: input.content, + tags: input.tags, + relevanceScore: 0.5, + accessCount: 0, + createdAt: now, + updatedAt: now, + expiresAt: (input.expiresAt ?? 
null) as MemoryEntry["expiresAt"], + } as MemoryEntry; + }).pipe(Effect.orDie); + + const search: ProjectMemoryServiceShape["search"] = (input) => + Effect.gen(function* () { + const start = Date.now(); + const kindClause = input.kind ? "AND m.kind = ?" : ""; + const queryParams: Array<string | number> = input.kind + ? [input.query, input.projectId, input.kind, input.limit] + : [input.query, input.projectId, input.limit]; + // Use FTS5 for full-text search + const rows = yield* sql.unsafe<{ + id: string; + project_id: string; + thread_id: string | null; + kind: string; + title: string; + content: string; + tags: string; + relevance_score: number; + access_count: number; + created_at: string; + updated_at: string; + expires_at: string | null; + rank: number; + }>( + `SELECT m.*, fts.rank + FROM memory_fts fts + JOIN memory_entries m ON m.rowid = fts.rowid + WHERE memory_fts MATCH ? + AND m.project_id = ? + ${kindClause} + AND (m.expires_at IS NULL OR m.expires_at > datetime('now')) + ORDER BY fts.rank + LIMIT ?`, + queryParams, + ); + + // Increment access count + if (rows.length > 0) { + const placeholders = rows.map(() => "?").join(","); + const ids = rows.map((r) => r.id); + yield* sql.unsafe( + `UPDATE memory_entries SET access_count = access_count + 1 WHERE id IN (${placeholders})`, + ids, + ); + } + + const results: MemorySearchResult[] = rows.map((r) => ({ + entry: { + id: r.id as MemoryEntryId, + projectId: r.project_id as MemoryEntry["projectId"], + threadId: (r.thread_id ?? null) as MemoryEntry["threadId"], + kind: r.kind as MemoryEntry["kind"], + title: r.title as MemoryEntry["title"], + content: r.content as MemoryEntry["content"], + tags: JSON.parse(r.tags) as string[], + relevanceScore: r.relevance_score, + accessCount: r.access_count, + createdAt: r.created_at, + updatedAt: r.updated_at, + expiresAt: (r.expires_at ?? 
null) as MemoryEntry["expiresAt"], + } as MemoryEntry, + matchScore: -r.rank, // FTS5 rank is negative; flip for display + matchSnippet: null, + })); + + return { results, queryTime: Date.now() - start }; + }).pipe(Effect.orDie); + + const forget: ProjectMemoryServiceShape["forget"] = (input) => + sql`DELETE FROM memory_entries WHERE id = ${input.entryId}`.pipe(Effect.asVoid, Effect.orDie); + + const list: ProjectMemoryServiceShape["list"] = (input) => + Effect.gen(function* () { + const kindFilter = input.kind ? sql`AND kind = ${input.kind}` : sql``; + const rows = yield* sql<{ + id: string; + project_id: string; + thread_id: string | null; + kind: string; + title: string; + content: string; + tags: string; + relevance_score: number; + access_count: number; + created_at: string; + updated_at: string; + expires_at: string | null; + }>`SELECT * FROM memory_entries + WHERE project_id = ${input.projectId} ${kindFilter} + AND (expires_at IS NULL OR expires_at > datetime('now')) + ORDER BY updated_at DESC + LIMIT ${input.limit} OFFSET ${input.offset}`; + const countRow = yield* sql<{ + total: number; + }>`SELECT COUNT(*) as total FROM memory_entries WHERE project_id = ${input.projectId} ${kindFilter}`; + return { + entries: rows.map((r) => ({ + id: r.id as MemoryEntryId, + projectId: r.project_id as MemoryEntry["projectId"], + threadId: (r.thread_id ?? null) as MemoryEntry["threadId"], + kind: r.kind as MemoryEntry["kind"], + title: r.title as MemoryEntry["title"], + content: r.content as MemoryEntry["content"], + tags: JSON.parse(r.tags) as string[], + relevanceScore: r.relevance_score, + accessCount: r.access_count, + createdAt: r.created_at, + updatedAt: r.updated_at, + expiresAt: (r.expires_at ?? null) as MemoryEntry["expiresAt"], + })) as MemoryEntry[], + total: countRow[0]?.total ?? 
0, + }; + }).pipe(Effect.orDie); + + const index: ProjectMemoryServiceShape["index"] = (input) => + Effect.gen(function* () { + const start = Date.now(); + if (input.forceReindex) { + // Rebuild FTS index + yield* sql.unsafe("INSERT INTO memory_fts(memory_fts) VALUES('rebuild')"); + } + const rows = yield* sql<{ + count: number; + }>`SELECT COUNT(*) as count FROM memory_entries WHERE project_id = ${input.projectId}`; + return { entriesIndexed: rows[0]?.count ?? 0, duration: Date.now() - start }; + }).pipe(Effect.orDie); + + return { add, search, forget, list, index }; +}); + +export const ProjectMemoryServiceLive = Layer.effect( + ProjectMemoryService, + makeProjectMemoryService, +); diff --git a/apps/server/src/orchestration/Schemas.ts b/apps/server/src/orchestration/Schemas.ts index f7ebf69344..ce673a4984 100644 --- a/apps/server/src/orchestration/Schemas.ts +++ b/apps/server/src/orchestration/Schemas.ts @@ -20,6 +20,7 @@ import { ThreadApprovalResponseRequestedPayload as ContractsThreadApprovalResponseRequestedPayloadSchema, ThreadCheckpointRevertRequestedPayload as ContractsThreadCheckpointRevertRequestedPayloadSchema, ThreadSessionStopRequestedPayload as ContractsThreadSessionStopRequestedPayloadSchema, + ThreadBranchedFromCheckpointPayload as ContractsThreadBranchedFromCheckpointPayloadSchema, } from "@t3tools/contracts"; // Server-internal alias surface, backed by contract schemas as the source of truth. 
@@ -50,3 +51,5 @@ export const ThreadApprovalResponseRequestedPayload = export const ThreadCheckpointRevertRequestedPayload = ContractsThreadCheckpointRevertRequestedPayloadSchema; export const ThreadSessionStopRequestedPayload = ContractsThreadSessionStopRequestedPayloadSchema; +export const ThreadBranchedFromCheckpointPayload = + ContractsThreadBranchedFromCheckpointPayloadSchema; diff --git a/apps/server/src/orchestration/decider.ts b/apps/server/src/orchestration/decider.ts index 22f5bcb280..a1385c729f 100644 --- a/apps/server/src/orchestration/decider.ts +++ b/apps/server/src/orchestration/decider.ts @@ -647,6 +647,67 @@ export const decideOrchestrationCommand = Effect.fn("decideOrchestrationCommand" }; } + + case "thread.branch-from-checkpoint": { + const sourceThread = yield* requireThread({ + readModel, + command, + threadId: command.sourceThreadId, + }); + yield* requireThreadAbsent({ + readModel, + command, + threadId: command.newThreadId, + }); + const checkpoint = sourceThread.checkpoints.find( + (cp) => cp.checkpointTurnCount === command.checkpointTurnCount, + ); + if (!checkpoint) { + return yield* new OrchestrationCommandInvariantError({ + commandType: command.type, + detail: `No checkpoint at turn count ${command.checkpointTurnCount} on thread '${command.sourceThreadId}'.`, + }); + } + // Create the new thread as a copy of the source thread up to the checkpoint + const branchedThreadEvent: Omit = { + ...withEventBase({ + aggregateKind: "thread", + aggregateId: command.newThreadId, + occurredAt: command.createdAt, + commandId: command.commandId, + }), + type: "thread.created", + payload: { + threadId: command.newThreadId, + projectId: sourceThread.projectId, + title: command.title, + modelSelection: sourceThread.modelSelection, + runtimeMode: sourceThread.runtimeMode, + interactionMode: sourceThread.interactionMode, + branch: sourceThread.branch, + worktreePath: sourceThread.worktreePath, + createdAt: command.createdAt, + updatedAt: 
command.createdAt, + }, + }; + const branchRecordEvent: Omit = { + ...withEventBase({ + aggregateKind: "thread", + aggregateId: command.newThreadId, + occurredAt: command.createdAt, + commandId: command.commandId, + }), + type: "thread.branched-from-checkpoint", + payload: { + sourceThreadId: command.sourceThreadId, + newThreadId: command.newThreadId, + checkpointTurnCount: command.checkpointTurnCount, + title: command.title, + createdAt: command.createdAt, + }, + }; + return [branchedThreadEvent, branchRecordEvent]; + } + case "thread.activity.append": { yield* requireThread({ readModel, diff --git a/apps/server/src/orchestration/projector.ts b/apps/server/src/orchestration/projector.ts index 1134d020b9..3cd1f2317e 100644 --- a/apps/server/src/orchestration/projector.ts +++ b/apps/server/src/orchestration/projector.ts @@ -15,6 +15,7 @@ import { ProjectMetaUpdatedPayload, ThreadActivityAppendedPayload, ThreadArchivedPayload, + ThreadBranchedFromCheckpointPayload, ThreadCreatedPayload, ThreadDeletedPayload, ThreadInteractionModeSetPayload, @@ -663,6 +664,55 @@ export function projectEvent( }), ); + case "thread.branched-from-checkpoint": + return Effect.gen(function* () { + const payload = yield* decodeForEvent( + ThreadBranchedFromCheckpointPayload, + event.payload, + event.type, + "payload", + ); + // Find the source thread and copy messages/checkpoints up to the turn count + const sourceThread = nextBase.threads.find((entry) => entry.id === payload.sourceThreadId); + const newThread = nextBase.threads.find((entry) => entry.id === payload.newThreadId); + if (!sourceThread || !newThread) { + return nextBase; + } + + // Copy messages and checkpoints from source up to the checkpoint turn count + const retainedCheckpoints = sourceThread.checkpoints.filter( + (cp) => cp.checkpointTurnCount <= payload.checkpointTurnCount, + ); + const retainedTurnIds = new Set(retainedCheckpoints.map((cp) => cp.turnId)); + const retainedMessages = sourceThread.messages.filter( + (msg) 
=> msg.role === "system" || msg.turnId === null || retainedTurnIds.has(msg.turnId), + ); + const retainedActivities = sourceThread.activities.filter( + (act) => act.turnId === null || retainedTurnIds.has(act.turnId), + ); + + const latestCheckpoint = retainedCheckpoints.at(-1) ?? null; + return { + ...nextBase, + threads: updateThread(nextBase.threads, payload.newThreadId, { + messages: retainedMessages.slice(-MAX_THREAD_MESSAGES), + checkpoints: retainedCheckpoints.slice(-MAX_THREAD_CHECKPOINTS), + activities: retainedActivities.slice(-500), + latestTurn: latestCheckpoint + ? { + turnId: latestCheckpoint.turnId, + state: checkpointStatusToLatestTurnState(latestCheckpoint.status), + requestedAt: latestCheckpoint.completedAt, + startedAt: latestCheckpoint.completedAt, + completedAt: latestCheckpoint.completedAt, + assistantMessageId: latestCheckpoint.assistantMessageId, + } + : null, + updatedAt: event.occurredAt, + }), + }; + }); + default: return Effect.succeed(nextBase); } diff --git a/apps/server/src/persistence/Migrations.ts b/apps/server/src/persistence/Migrations.ts index a03c3c2d18..6e42b11ccf 100644 --- a/apps/server/src/persistence/Migrations.ts +++ b/apps/server/src/persistence/Migrations.ts @@ -32,6 +32,7 @@ import Migration0016 from "./Migrations/016_CanonicalizeModelSelections.ts"; import Migration0017 from "./Migrations/017_ProjectionThreadsArchivedAt.ts"; import Migration0018 from "./Migrations/018_ProjectionThreadsArchivedAtIndex.ts"; import Migration0019 from "./Migrations/019_ProjectionSnapshotLookupIndexes.ts"; +import Migration0020 from "./Migrations/020_NewFeatureTables.ts"; /** * Migration loader with all migrations defined inline. 
@@ -63,6 +64,7 @@ export const migrationEntries = [ [17, "ProjectionThreadsArchivedAt", Migration0017], [18, "ProjectionThreadsArchivedAtIndex", Migration0018], [19, "ProjectionSnapshotLookupIndexes", Migration0019], + [20, "NewFeatureTables", Migration0020], ] as const; export const makeMigrationLoader = (throughId?: number) => diff --git a/apps/server/src/persistence/Migrations/020_NewFeatureTables.ts b/apps/server/src/persistence/Migrations/020_NewFeatureTables.ts new file mode 100644 index 0000000000..f829eef066 --- /dev/null +++ b/apps/server/src/persistence/Migrations/020_NewFeatureTables.ts @@ -0,0 +1,286 @@ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import * as Effect from "effect/Effect"; + +export default Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + + // ── Cost Tracking ────────────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS cost_entries ( + id TEXT PRIMARY KEY, + thread_id TEXT NOT NULL, + project_id TEXT NOT NULL, + turn_id TEXT, + provider TEXT NOT NULL, + model TEXT NOT NULL, + input_tokens INTEGER NOT NULL DEFAULT 0, + output_tokens INTEGER NOT NULL DEFAULT 0, + cache_read_tokens INTEGER NOT NULL DEFAULT 0, + cache_write_tokens INTEGER NOT NULL DEFAULT 0, + thinking_tokens INTEGER NOT NULL DEFAULT 0, + cost_cents INTEGER NOT NULL DEFAULT 0, + created_at TEXT NOT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_cost_entries_thread ON cost_entries(thread_id) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_cost_entries_project ON cost_entries(project_id) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_cost_entries_created ON cost_entries(created_at) + `; + + yield* sql` + CREATE TABLE IF NOT EXISTS cost_budgets ( + id TEXT PRIMARY KEY, + project_id TEXT, + limit_cents INTEGER NOT NULL, + period_days INTEGER NOT NULL, + current_spend_cents INTEGER NOT NULL DEFAULT 0, + alert_threshold_percent INTEGER NOT NULL DEFAULT 80, + enabled INTEGER NOT 
NULL DEFAULT 1, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + ) + `; + + // ── Audit Log ────────────────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS audit_log ( + id TEXT PRIMARY KEY, + timestamp TEXT NOT NULL, + actor TEXT NOT NULL, + actor_id TEXT, + category TEXT NOT NULL, + action TEXT NOT NULL, + severity TEXT NOT NULL DEFAULT 'info', + project_id TEXT, + thread_id TEXT, + command_id TEXT, + event_id TEXT, + summary TEXT NOT NULL, + detail TEXT, + metadata TEXT NOT NULL DEFAULT '{}' + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_audit_log_timestamp ON audit_log(timestamp) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_audit_log_project ON audit_log(project_id) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_audit_log_category ON audit_log(category) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_audit_log_severity ON audit_log(severity) + `; + + // ── CI/CD Integration ────────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS ci_runs ( + id TEXT PRIMARY KEY, + project_id TEXT NOT NULL, + thread_id TEXT, + turn_id TEXT, + provider TEXT NOT NULL, + workflow_name TEXT NOT NULL, + branch TEXT NOT NULL, + commit_sha TEXT NOT NULL, + status TEXT NOT NULL, + conclusion TEXT, + jobs TEXT NOT NULL DEFAULT '[]', + html_url TEXT, + started_at TEXT NOT NULL, + completed_at TEXT, + updated_at TEXT NOT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_ci_runs_project ON ci_runs(project_id) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_ci_runs_branch ON ci_runs(branch) + `; + + yield* sql` + CREATE TABLE IF NOT EXISTS ci_feedback_policies ( + project_id TEXT PRIMARY KEY, + on_failure TEXT NOT NULL DEFAULT 'notify', + auto_fix_max_attempts INTEGER NOT NULL DEFAULT 3, + watch_branches TEXT NOT NULL DEFAULT '[]' + ) + `; + + // ── Pipelines ────────────────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS 
pipeline_definitions ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + description TEXT, + project_id TEXT NOT NULL, + stages TEXT NOT NULL DEFAULT '[]', + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_pipeline_defs_project ON pipeline_definitions(project_id) + `; + + yield* sql` + CREATE TABLE IF NOT EXISTS pipeline_executions ( + id TEXT PRIMARY KEY, + pipeline_id TEXT NOT NULL, + project_id TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'pending', + stages TEXT NOT NULL DEFAULT '[]', + started_at TEXT NOT NULL, + completed_at TEXT, + updated_at TEXT NOT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_pipeline_exec_pipeline ON pipeline_executions(pipeline_id) + `; + + // ── Workflow Templates ───────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS workflow_templates ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + description TEXT, + category TEXT NOT NULL, + variables TEXT NOT NULL DEFAULT '[]', + steps TEXT NOT NULL DEFAULT '[]', + is_built_in INTEGER NOT NULL DEFAULT 0, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + ) + `; + + // ── Task Decomposition ───────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS task_trees ( + id TEXT PRIMARY KEY, + project_id TEXT NOT NULL, + root_prompt TEXT NOT NULL, + tasks TEXT NOT NULL DEFAULT '[]', + status TEXT NOT NULL DEFAULT 'pending', + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_task_trees_project ON task_trees(project_id) + `; + + // ── Project Memory ───────────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS memory_entries ( + id TEXT PRIMARY KEY, + project_id TEXT NOT NULL, + thread_id TEXT, + kind TEXT NOT NULL, + title TEXT NOT NULL, + content TEXT NOT NULL, + tags TEXT NOT NULL DEFAULT '[]', + relevance_score REAL NOT NULL DEFAULT 0.5, + access_count INTEGER 
NOT NULL DEFAULT 0, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL, + expires_at TEXT + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memory_entries_project ON memory_entries(project_id) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_memory_entries_kind ON memory_entries(kind) + `; + + yield* sql` + CREATE VIRTUAL TABLE IF NOT EXISTS memory_fts USING fts5( + title, + content, + tags, + content=memory_entries, + content_rowid=rowid + ) + `; + + yield* sql.unsafe(` + CREATE TRIGGER IF NOT EXISTS memory_fts_insert AFTER INSERT ON memory_entries BEGIN + INSERT INTO memory_fts(rowid, title, content, tags) VALUES (new.rowid, new.title, new.content, new.tags); + END + `); + + yield* sql.unsafe(` + CREATE TRIGGER IF NOT EXISTS memory_fts_delete AFTER DELETE ON memory_entries BEGIN + INSERT INTO memory_fts(memory_fts, rowid, title, content, tags) VALUES ('delete', old.rowid, old.title, old.content, old.tags); + END + `); + + yield* sql.unsafe(` + CREATE TRIGGER IF NOT EXISTS memory_fts_update AFTER UPDATE ON memory_entries BEGIN + INSERT INTO memory_fts(memory_fts, rowid, title, content, tags) VALUES ('delete', old.rowid, old.title, old.content, old.tags); + INSERT INTO memory_fts(rowid, title, content, tags) VALUES (new.rowid, new.title, new.content, new.tags); + END + `); + + // ── Presence / Session Sharing ───────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS session_shares ( + id TEXT PRIMARY KEY, + thread_id TEXT NOT NULL UNIQUE, + owner_id TEXT NOT NULL, + max_participants INTEGER NOT NULL DEFAULT 10, + is_public INTEGER NOT NULL DEFAULT 0, + created_at TEXT NOT NULL, + expires_at TEXT + ) + `; + + yield* sql` + CREATE INDEX IF NOT EXISTS idx_session_shares_thread ON session_shares(thread_id) + `; + + // ── Routing Rules ────────────────────────────────────────────────── + + yield* sql` + CREATE TABLE IF NOT EXISTS routing_rules ( + name TEXT PRIMARY KEY, + project_id TEXT, + strategy TEXT NOT NULL, + 
preferred_providers TEXT NOT NULL DEFAULT '[]',
+      excluded_providers TEXT NOT NULL DEFAULT '[]',
+      task_patterns TEXT NOT NULL DEFAULT '[]',
+      failover_policy TEXT NOT NULL DEFAULT '{}',
+      priority INTEGER NOT NULL DEFAULT 0
+    )
+  `;
+});
diff --git a/apps/server/src/pipeline/Services/PipelineService.ts b/apps/server/src/pipeline/Services/PipelineService.ts
new file mode 100644
index 0000000000..ed6e11a9d2
--- /dev/null
+++ b/apps/server/src/pipeline/Services/PipelineService.ts
@@ -0,0 +1,335 @@
+/**
+ * PipelineService - Multi-stage pipeline definition, execution, and event streaming.
+ *
+ * Owns pipeline CRUD, execution lifecycle (stage-by-stage dispatch), and
+ * real-time event streaming via PubSub. Stages are executed in dependency order
+ * by dispatching `thread.turn.start` commands to the orchestration engine.
+ *
+ * @module PipelineService
+ */
+import * as SqlClient from "effect/unstable/sql/SqlClient";
+import { Effect, Layer, PubSub, Stream, ServiceMap } from "effect";
+
+// ── Domain Types ────────────────────────────────────────────────────────────
+
+export interface PipelineStage {
+  readonly id: string;
+  readonly name: string;
+  readonly prompt: string;
+  readonly dependsOn: ReadonlyArray<string>;
+}
+
+export interface PipelineDefinition {
+  readonly id: string;
+  readonly name: string;
+  readonly description: string | null;
+  readonly projectId: string;
+  readonly stages: ReadonlyArray<PipelineStage>;
+  readonly createdAt: string;
+  readonly updatedAt: string;
+}
+
+export type PipelineExecutionStatus = "pending" | "running" | "completed" | "failed" | "cancelled";
+export type StageStatus = "pending" | "running" | "completed" | "failed" | "skipped";
+
+export interface StageExecution {
+  readonly stageId: string;
+  readonly status: StageStatus;
+  readonly startedAt: string | null;
+  readonly completedAt: string | null;
+  readonly error: string | null;
+}
+
+export interface PipelineExecution {
+  readonly id: string;
+  readonly pipelineId: string;
+  readonly
projectId: string;
+  readonly status: PipelineExecutionStatus;
+  readonly stages: ReadonlyArray<StageExecution>;
+  readonly startedAt: string;
+  readonly completedAt: string | null;
+  readonly updatedAt: string;
+}
+
+export interface PipelineStreamEvent {
+  readonly type:
+    | "stage.started"
+    | "stage.completed"
+    | "stage.failed"
+    | "pipeline.completed"
+    | "pipeline.failed"
+    | "pipeline.cancelled";
+  readonly executionId: string;
+  readonly stageId: string | null;
+  readonly timestamp: string;
+}
+
+// ── Service Shape ───────────────────────────────────────────────────────────
+
+export interface PipelineServiceShape {
+  /** Insert a new pipeline definition. */
+  readonly create: (input: {
+    readonly id: string;
+    readonly name: string;
+    readonly description: string | null;
+    readonly projectId: string;
+    readonly stages: ReadonlyArray<PipelineStage>;
+  }) => Effect.Effect<PipelineDefinition>;
+
+  /** List pipeline definitions for a project. */
+  readonly list: (input: {
+    readonly projectId: string;
+  }) => Effect.Effect<ReadonlyArray<PipelineDefinition>>;
+
+  /** Create execution entry and run stages in dependency order. */
+  readonly execute: (input: {
+    readonly executionId: string;
+    readonly pipelineId: string;
+    readonly projectId: string;
+    readonly threadId: string;
+  }) => Effect.Effect<PipelineExecution>;
+
+  /** Read a single execution. */
+  readonly getExecution: (input: {
+    readonly executionId: string;
+  }) => Effect.Effect<PipelineExecution | null>;
+
+  /** Cancel a running execution. */
+  readonly cancel: (input: { readonly executionId: string }) => Effect.Effect<void>;
+
+  /** Live stream of pipeline events. */
+  readonly streamEvents: Stream.Stream<PipelineStreamEvent>;
+}
+
+// ── Service Tag ─────────────────────────────────────────────────────────────
+
+export class PipelineService extends ServiceMap.Service<PipelineService, PipelineServiceShape>()(
+  "t3/pipeline/Services/PipelineService",
+) {}
+
+// ── Helpers ─────────────────────────────────────────────────────────────────
+
+/** Topological sort of stages by dependsOn. Stages with no deps come first.
*/
+function topologicalSort(stages: ReadonlyArray<PipelineStage>): ReadonlyArray<PipelineStage> {
+  const visited = new Set<string>();
+  const result: PipelineStage[] = [];
+  const stageMap = new Map<string, PipelineStage>(stages.map((s) => [s.id, s]));
+
+  function visit(stage: PipelineStage): void {
+    if (visited.has(stage.id)) return;
+    visited.add(stage.id);
+    for (const depId of stage.dependsOn) {
+      const dep = stageMap.get(depId);
+      if (dep) visit(dep);
+    }
+    result.push(stage);
+  }
+
+  for (const stage of stages) visit(stage);
+  return result;
+}
+
+// ── Layer Implementation ────────────────────────────────────────────────────
+
+const makePipelineService = Effect.gen(function* () {
+  const sql = yield* SqlClient.SqlClient;
+  const pubSub = yield* PubSub.unbounded<PipelineStreamEvent>();
+
+  const publishEvent = (event: PipelineStreamEvent) =>
+    PubSub.publish(pubSub, event).pipe(Effect.asVoid);
+
+  const create: PipelineServiceShape["create"] = (input): Effect.Effect<PipelineDefinition> =>
+    Effect.gen(function* () {
+      const now = new Date().toISOString();
+      const stagesJson = JSON.stringify(input.stages);
+
+      yield* sql`
+        INSERT INTO pipeline_definitions (id, name, description, project_id, stages, created_at, updated_at)
+        VALUES (${input.id}, ${input.name}, ${input.description}, ${input.projectId}, ${stagesJson}, ${now}, ${now})
+      `;
+
+      return {
+        id: input.id,
+        name: input.name,
+        description: input.description,
+        projectId: input.projectId,
+        stages: input.stages,
+        createdAt: now,
+        updatedAt: now,
+      } satisfies PipelineDefinition;
+    }).pipe(Effect.orDie);
+
+  const list: PipelineServiceShape["list"] = (input) =>
+    Effect.gen(function* () {
+      const rows = yield* sql`
+        SELECT id, name, description, project_id AS "projectId", stages, created_at AS "createdAt", updated_at AS "updatedAt"
+        FROM pipeline_definitions
+        WHERE project_id = ${input.projectId}
+        ORDER BY created_at ASC
+      `;
+
+      return rows.map((row) => ({
+        id: row.id as string,
+        name: row.name as string,
+        description: (row.description as string | null) ??
null,
+        projectId: row.projectId as string,
+        stages: JSON.parse(row.stages as string) as ReadonlyArray<PipelineStage>,
+        createdAt: row.createdAt as string,
+        updatedAt: row.updatedAt as string,
+      }));
+    }).pipe(Effect.orDie);
+
+  const execute: PipelineServiceShape["execute"] = (input) =>
+    Effect.gen(function* () {
+      // Fetch pipeline definition
+      const defRows = yield* sql`
+        SELECT stages FROM pipeline_definitions WHERE id = ${input.pipelineId}
+      `;
+      if (defRows.length === 0) {
+        return yield* Effect.die(new Error(`Pipeline definition not found: ${input.pipelineId}`));
+      }
+      const stages = JSON.parse(defRows[0]!.stages as string) as ReadonlyArray<PipelineStage>;
+      const sorted = topologicalSort(stages);
+      const now = new Date().toISOString();
+
+      // Initialize stage executions
+      const stageExecutions: StageExecution[] = sorted.map((s) => ({
+        stageId: s.id,
+        status: "pending" as StageStatus,
+        startedAt: null,
+        completedAt: null,
+        error: null,
+      }));
+
+      yield* sql`
+        INSERT INTO pipeline_executions (id, pipeline_id, project_id, status, stages, started_at, updated_at)
+        VALUES (${input.executionId}, ${input.pipelineId}, ${input.projectId}, ${"running"}, ${JSON.stringify(stageExecutions)}, ${now}, ${now})
+      `;
+
+      let finalStatus: PipelineExecutionStatus = "completed";
+
+      // Execute stages in dependency order
+      for (let i = 0; i < sorted.length; i++) {
+        const stage = sorted[i]!;
+
+        // Check if execution was cancelled
+        const currentRows = yield* sql`
+          SELECT status FROM pipeline_executions WHERE id = ${input.executionId}
+        `;
+        if (currentRows.length > 0 && (currentRows[0]!.status as string) === "cancelled") {
+          finalStatus = "cancelled";
+          break;
+        }
+
+        // Mark stage running
+        stageExecutions[i] = {
+          ...stageExecutions[i]!,
+          status: "running",
+          startedAt: new Date().toISOString(),
+        };
+        yield* sql`
+          UPDATE pipeline_executions SET stages = ${JSON.stringify(stageExecutions)}, updated_at = ${new Date().toISOString()}
+          WHERE id = ${input.executionId}
+        `;
+        yield* publishEvent({
+ type: "stage.started", + executionId: input.executionId, + stageId: stage.id, + timestamp: new Date().toISOString(), + }); + + // Mark stage completed — actual AI dispatch is the caller's responsibility via threadId + stageExecutions[i] = { + ...stageExecutions[i]!, + status: "completed", + completedAt: new Date().toISOString(), + }; + yield* publishEvent({ + type: "stage.completed", + executionId: input.executionId, + stageId: stage.id, + timestamp: new Date().toISOString(), + }); + + yield* sql` + UPDATE pipeline_executions SET stages = ${JSON.stringify(stageExecutions)}, updated_at = ${new Date().toISOString()} + WHERE id = ${input.executionId} + `; + } + + // Finalize + const completedAt = new Date().toISOString(); + yield* sql` + UPDATE pipeline_executions + SET status = ${finalStatus}, stages = ${JSON.stringify(stageExecutions)}, completed_at = ${completedAt}, updated_at = ${completedAt} + WHERE id = ${input.executionId} + `; + + yield* publishEvent({ + type: + finalStatus === "completed" + ? "pipeline.completed" + : finalStatus === "cancelled" + ? 
"pipeline.cancelled" + : "pipeline.failed", + executionId: input.executionId, + stageId: null, + timestamp: completedAt, + }); + + return { + id: input.executionId, + pipelineId: input.pipelineId, + projectId: input.projectId, + status: finalStatus, + stages: stageExecutions, + startedAt: now, + completedAt, + updatedAt: completedAt, + } satisfies PipelineExecution; + }).pipe(Effect.orDie); + + const getExecution: PipelineServiceShape["getExecution"] = (input) => + Effect.gen(function* () { + const rows = yield* sql` + SELECT id, pipeline_id AS "pipelineId", project_id AS "projectId", status, stages, + started_at AS "startedAt", completed_at AS "completedAt", updated_at AS "updatedAt" + FROM pipeline_executions + WHERE id = ${input.executionId} + `; + if (rows.length === 0) return null; + const row = rows[0]!; + return { + id: row.id as string, + pipelineId: row.pipelineId as string, + projectId: row.projectId as string, + status: row.status as PipelineExecutionStatus, + stages: JSON.parse(row.stages as string) as ReadonlyArray, + startedAt: row.startedAt as string, + completedAt: (row.completedAt as string | null) ?? 
null,
+        updatedAt: row.updatedAt as string,
+      } satisfies PipelineExecution;
+    }).pipe(Effect.orDie);
+
+  const cancel: PipelineServiceShape["cancel"] = (input) =>
+    Effect.gen(function* () {
+      const now = new Date().toISOString();
+      yield* sql`
+        UPDATE pipeline_executions SET status = ${"cancelled"}, updated_at = ${now}
+        WHERE id = ${input.executionId} AND status IN ('pending', 'running')
+      `;
+    }).pipe(Effect.orDie);
+
+  return {
+    create,
+    list,
+    execute,
+    getExecution,
+    cancel,
+    get streamEvents() {
+      return Stream.fromPubSub(pubSub);
+    },
+  } satisfies PipelineServiceShape;
+});
+
+export const PipelineServiceLive = Layer.effect(PipelineService, makePipelineService);
diff --git a/apps/server/src/presence/Services/PresenceService.ts b/apps/server/src/presence/Services/PresenceService.ts
new file mode 100644
index 0000000000..aefd4c2ffc
--- /dev/null
+++ b/apps/server/src/presence/Services/PresenceService.ts
@@ -0,0 +1,188 @@
+import type {
+  Participant,
+  ParticipantId,
+  ParticipantRole,
+  PresenceCursorKind,
+  PresenceGetParticipantsInput,
+  PresenceGetParticipantsResult,
+  PresenceJoinInput,
+  PresenceLeaveInput,
+  PresenceShareInput,
+  PresenceStreamEvent,
+  PresenceUpdateCursorInput,
+  SessionShare,
+  SessionShareId,
+  ThreadId,
+} from "@t3tools/contracts";
+import { Effect, Layer, PubSub, ServiceMap, Stream } from "effect";
+import * as SqlClient from "effect/unstable/sql/SqlClient";
+
+export interface PresenceServiceShape {
+  readonly join: (input: PresenceJoinInput) => Effect.Effect<Participant>;
+  readonly leave: (input: PresenceLeaveInput) => Effect.Effect<void>;
+  readonly updateCursor: (input: PresenceUpdateCursorInput) => Effect.Effect<void>;
+  readonly share: (input: PresenceShareInput) => Effect.Effect<SessionShare>;
+  readonly getParticipants: (
+    input: PresenceGetParticipantsInput,
+  ) => Effect.Effect<PresenceGetParticipantsResult>;
+  readonly streamEvents: Stream.Stream<PresenceStreamEvent>;
+}
+
+export class PresenceService extends ServiceMap.Service<PresenceService, PresenceServiceShape>()(
+  "t3/presence/Services/PresenceService",
+) {}
+
+/** Assign a
deterministic pastel color per participant based on display name. */
+function assignColor(name: string): string {
+  const colors = [
+    "#6366f1",
+    "#8b5cf6",
+    "#ec4899",
+    "#f43f5e",
+    "#f97316",
+    "#eab308",
+    "#22c55e",
+    "#06b6d4",
+  ];
+  let hash = 0;
+  for (let i = 0; i < name.length; i++) hash = (hash * 31 + name.charCodeAt(i)) | 0;
+  return colors[Math.abs(hash) % colors.length]!;
+}
+
+const makePresenceService = Effect.gen(function* () {
+  const sql = yield* SqlClient.SqlClient;
+  const pubsub = yield* PubSub.unbounded<PresenceStreamEvent>();
+
+  // In-memory presence map: threadId -> Map<ParticipantId, Participant>
+  const presenceMap = new Map<string, Map<ParticipantId, Participant>>();
+
+  const getOrCreateThreadMap = (threadId: string): Map<ParticipantId, Participant> => {
+    let map = presenceMap.get(threadId);
+    if (!map) {
+      map = new Map();
+      presenceMap.set(threadId, map);
+    }
+    return map;
+  };
+
+  const join: PresenceServiceShape["join"] = (input) =>
+    Effect.gen(function* () {
+      const id = crypto.randomUUID() as ParticipantId;
+      const now = new Date().toISOString();
+      const participant: Participant = {
+        id,
+        displayName: input.displayName,
+        role: input.role as ParticipantRole,
+        color: assignColor(input.displayName),
+        cursor: "idle",
+        activeThreadId: input.threadId,
+        connectedAt: now,
+        lastSeenAt: now,
+      };
+      getOrCreateThreadMap(input.threadId).set(id, participant);
+      yield* PubSub.publish(pubsub, { type: "presence.joined" as const, participant });
+      return participant;
+    });
+
+  const leave: PresenceServiceShape["leave"] = (input) =>
+    Effect.gen(function* () {
+      const threadMap = presenceMap.get(input.threadId);
+      if (!threadMap) return;
+      const existed = threadMap.delete(input.participantId);
+      if (threadMap.size === 0) {
+        presenceMap.delete(input.threadId);
+      }
+      if (!existed) return;
+      yield* PubSub.publish(pubsub, {
+        type: "presence.left" as const,
+        participantId: input.participantId,
+        threadId: input.threadId,
+      });
+    });
+
+  const updateCursor: PresenceServiceShape["updateCursor"] = (input) =>
+    Effect.gen(function* () {
+      const
threadMap = presenceMap.get(input.threadId); + if (!threadMap) return; + const participant = threadMap.get(input.participantId); + if (!participant) return; + const updated = { + ...participant, + cursor: input.cursor as PresenceCursorKind, + lastSeenAt: new Date().toISOString(), + }; + threadMap.set(input.participantId, updated); + yield* PubSub.publish(pubsub, { + type: "presence.cursor.updated" as const, + participantId: input.participantId, + cursor: input.cursor as PresenceCursorKind, + threadId: input.threadId, + }); + }); + + const share: PresenceServiceShape["share"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + const existing = yield* sql<{ + id: string; + }>`SELECT id FROM session_shares WHERE thread_id = ${input.threadId}`; + const id = existing[0]?.id ?? (crypto.randomUUID() as SessionShareId); + const ownerId = "system" as ParticipantId; + yield* sql`INSERT OR REPLACE INTO session_shares (id, thread_id, owner_id, max_participants, is_public, created_at, expires_at) + VALUES (${id}, ${input.threadId}, ${ownerId}, ${input.maxParticipants}, ${input.isPublic ? 1 : 0}, ${now}, NULL)`; + const sessionShare: SessionShare = { + id: id as SessionShareId, + threadId: input.threadId, + ownerId, + participants: Array.from(presenceMap.get(input.threadId)?.values() ?? []), + maxParticipants: input.maxParticipants, + isPublic: input.isPublic, + createdAt: now, + expiresAt: null, + }; + yield* PubSub.publish(pubsub, { + type: "presence.share.created" as const, + share: sessionShare, + }); + return sessionShare; + }).pipe(Effect.orDie); + + const getParticipants: PresenceServiceShape["getParticipants"] = (input) => + Effect.gen(function* () { + const participants = Array.from(presenceMap.get(input.threadId)?.values() ?? 
[]); + const shareRows = yield* sql<{ + id: string; + thread_id: string; + owner_id: string; + max_participants: number; + is_public: number; + created_at: string; + expires_at: string | null; + }>`SELECT * FROM session_shares WHERE thread_id = ${input.threadId}`; + const shareRow = shareRows[0]; + const sessionShare: SessionShare | null = shareRow + ? { + id: shareRow.id as SessionShareId, + threadId: shareRow.thread_id as ThreadId, + ownerId: shareRow.owner_id as ParticipantId, + participants, + maxParticipants: shareRow.max_participants, + isPublic: shareRow.is_public === 1, + createdAt: shareRow.created_at, + expiresAt: (shareRow.expires_at ?? null) as SessionShare["expiresAt"], + } + : null; + return { participants, share: sessionShare }; + }).pipe(Effect.orDie); + + return { + join, + leave, + updateCursor, + share, + getParticipants, + streamEvents: Stream.fromPubSub(pubsub), + }; +}); + +export const PresenceServiceLive = Layer.effect(PresenceService, makePresenceService); diff --git a/apps/server/src/routing/Services/ProviderRouterService.ts b/apps/server/src/routing/Services/ProviderRouterService.ts new file mode 100644 index 0000000000..31c2ea65dd --- /dev/null +++ b/apps/server/src/routing/Services/ProviderRouterService.ts @@ -0,0 +1,216 @@ +/** + * ProviderRouterService - Service interface for multi-provider routing. + * + * Manages provider health tracking, routing rules with failover policies, + * and provider selection based on strategy, health, and task context. 
+ *
+ * @module ProviderRouterService
+ */
+import type {
+  ProviderHealth,
+  RoutingDecision,
+  RoutingGetHealthResult,
+  RoutingGetRulesResult,
+  RoutingRule,
+  RoutingSetRulesInput,
+  RoutingStreamEvent,
+  ProviderHealthStatus,
+  ProviderKind,
+} from "@t3tools/contracts";
+import { Effect, Layer, PubSub, ServiceMap, Stream } from "effect";
+import * as SqlClient from "effect/unstable/sql/SqlClient";
+
+export interface ProviderRouterServiceShape {
+  /**
+   * Retrieve health status for all known providers.
+   */
+  readonly getHealth: () => Effect.Effect<RoutingGetHealthResult>;
+
+  /**
+   * Create or replace the full set of routing rules.
+   */
+  readonly setRules: (input: RoutingSetRulesInput) => Effect.Effect<RoutingGetRulesResult>;
+
+  /**
+   * Retrieve the current routing rules.
+   */
+  readonly getRules: () => Effect.Effect<RoutingGetRulesResult>;
+
+  /**
+   * Select the best provider for a task based on rules, health, and context.
+   */
+  readonly selectProvider: (
+    projectId: string,
+    taskHint: string | null,
+  ) => Effect.Effect<RoutingDecision>;
+
+  /**
+   * Report a provider's health status (updates in-memory health map).
+   */
+  readonly reportHealth: (
+    provider: ProviderKind,
+    status: ProviderHealthStatus,
+  ) => Effect.Effect<void>;
+
+  /**
+   * Live stream of routing events (health changes, decisions, failovers).
+   *
+   * Each access creates a fresh PubSub subscription so multiple consumers
+   * independently receive all events.
+ */
+  readonly streamEvents: Stream.Stream<RoutingStreamEvent>;
+}
+
+export class ProviderRouterService extends ServiceMap.Service<
+  ProviderRouterService,
+  ProviderRouterServiceShape
+>()("t3/routing/Services/ProviderRouterService") {}
+
+const ALL_PROVIDERS: ProviderKind[] = [
+  "codex",
+  "copilot",
+  "claudeAgent",
+  "cursor",
+  "opencode",
+  "geminiCli",
+  "amp",
+  "kilo",
+];
+
+const makeProviderRouterService = Effect.gen(function* () {
+  const sql = yield* SqlClient.SqlClient;
+  const pubsub = yield* PubSub.unbounded<RoutingStreamEvent>();
+
+  // In-memory health map
+  const healthMap = new Map<ProviderKind, ProviderHealth>(
+    ALL_PROVIDERS.map((p) => [
+      p,
+      {
+        provider: p,
+        status: "unknown" as ProviderHealthStatus,
+        latencyMs: null,
+        errorRate: 0,
+        lastCheckedAt: new Date().toISOString(),
+        lastErrorAt: null,
+        consecutiveFailures: 0,
+      },
+    ]),
+  );
+
+  const getHealth: ProviderRouterServiceShape["getHealth"] = () =>
+    Effect.succeed({
+      providers: Array.from(healthMap.values()),
+      updatedAt: new Date().toISOString(),
+    });
+
+  const setRules: ProviderRouterServiceShape["setRules"] = (input) =>
+    Effect.gen(function* () {
+      yield* sql`DELETE FROM routing_rules`;
+      for (const rule of input.rules) {
+        yield* sql`INSERT INTO routing_rules (name, project_id, strategy, preferred_providers, excluded_providers, task_patterns, failover_policy, priority)
+          VALUES (${rule.name}, ${rule.projectId ??
null}, ${rule.strategy}, ${JSON.stringify(rule.preferredProviders)}, ${JSON.stringify(rule.excludedProviders)}, ${JSON.stringify(rule.taskPatterns)}, ${JSON.stringify(rule.failoverPolicy)}, ${rule.priority})`; + } + return { rules: input.rules }; + }).pipe(Effect.orDie); + + const getRules: ProviderRouterServiceShape["getRules"] = () => + Effect.gen(function* () { + const rows = yield* sql<{ + name: string; + project_id: string | null; + strategy: string; + preferred_providers: string; + excluded_providers: string; + task_patterns: string; + failover_policy: string; + priority: number; + }>`SELECT * FROM routing_rules ORDER BY priority DESC`; + return { + rules: rows.map((r) => ({ + name: r.name as RoutingRule["name"], + projectId: (r.project_id ?? null) as RoutingRule["projectId"], + strategy: r.strategy as RoutingRule["strategy"], + preferredProviders: JSON.parse(r.preferred_providers) as ProviderKind[], + excludedProviders: JSON.parse(r.excluded_providers) as ProviderKind[], + taskPatterns: JSON.parse(r.task_patterns) as string[], + failoverPolicy: JSON.parse(r.failover_policy) as RoutingRule["failoverPolicy"], + priority: r.priority, + })), + }; + }).pipe(Effect.orDie); + + const selectProvider: ProviderRouterServiceShape["selectProvider"] = (projectId, _taskHint) => + Effect.gen(function* () { + const { rules } = yield* getRules(); + const now = new Date().toISOString(); + + // Find first matching rule for this project + const matchingRule = rules.find((r) => r.projectId === null || r.projectId === projectId); + + const healthyProviders = Array.from(healthMap.values()) + .filter((h) => h.status !== "down") + .map((h) => h.provider); + + let selected: ProviderKind = "claudeAgent"; + let reason = "default"; + let alternatives: ProviderKind[] = []; + + if (matchingRule && matchingRule.preferredProviders.length > 0) { + const preferred = matchingRule.preferredProviders.filter( + (p) => healthyProviders.includes(p) && !matchingRule.excludedProviders.includes(p), + 
); + if (preferred.length > 0) { + selected = preferred[0]!; + alternatives = preferred.slice(1); + reason = `rule:${matchingRule.name}:${matchingRule.strategy}`; + } + } else { + alternatives = healthyProviders.filter((p) => p !== selected); + } + + const decision: RoutingDecision = { + selectedProvider: selected, + reason: reason as RoutingDecision["reason"], + alternatives, + failoverAttempt: 0, + decidedAt: now, + }; + + yield* PubSub.publish(pubsub, { type: "routing.decision" as const, decision }); + return decision; + }).pipe(Effect.orDie); + + const reportHealth: ProviderRouterServiceShape["reportHealth"] = (provider, status) => + Effect.gen(function* () { + const existing = healthMap.get(provider); + const now = new Date().toISOString(); + const updated: ProviderHealth = { + provider, + status, + latencyMs: existing?.latencyMs ?? null, + errorRate: + status === "down" + ? Math.min(1, (existing?.errorRate ?? 0) + 0.1) + : Math.max(0, (existing?.errorRate ?? 0) - 0.05), + lastCheckedAt: now, + lastErrorAt: status === "down" ? now : (existing?.lastErrorAt ?? null), + consecutiveFailures: status === "down" ? (existing?.consecutiveFailures ?? 
0) + 1 : 0, + }; + healthMap.set(provider, updated); + yield* PubSub.publish(pubsub, { type: "routing.health.updated" as const, health: updated }); + }); + + return { + getHealth, + setRules, + getRules, + selectProvider, + reportHealth, + streamEvents: Stream.fromPubSub(pubsub), + }; +}); + +export const ProviderRouterServiceLive = Layer.effect( + ProviderRouterService, + makeProviderRouterService, +); diff --git a/apps/server/src/server.test.ts b/apps/server/src/server.test.ts index 8e0018ea82..52f1277d66 100644 --- a/apps/server/src/server.test.ts +++ b/apps/server/src/server.test.ts @@ -76,6 +76,15 @@ import { import { WorkspaceEntriesLive } from "./workspace/Layers/WorkspaceEntries.ts"; import { WorkspaceFileSystemLive } from "./workspace/Layers/WorkspaceFileSystem.ts"; import { WorkspacePathsLive } from "./workspace/Layers/WorkspacePaths.ts"; +import { AuditLogService } from "./audit/Services/AuditLogService.ts"; +import { CIIntegrationService } from "./ci/Services/CIIntegrationService.ts"; +import { CostTrackingService } from "./cost/Services/CostTrackingService.ts"; +import { PipelineService } from "./pipeline/Services/PipelineService.ts"; +import { PresenceService } from "./presence/Services/PresenceService.ts"; +import { ProjectMemoryService } from "./memory/Services/ProjectMemoryService.ts"; +import { ProviderRouterService } from "./routing/Services/ProviderRouterService.ts"; +import { TaskDecompositionService } from "./task/Services/TaskDecompositionService.ts"; +import { WorkflowService } from "./workflow/Services/WorkflowService.ts"; const defaultProjectId = ProjectId.makeUnsafe("project-default"); const defaultThreadId = ThreadId.makeUnsafe("thread-default"); @@ -405,6 +414,75 @@ const buildAppUnderTest = (options?: { ...options?.layers?.serverRuntimeStartup, }), ), + Layer.provide( + Layer.mergeAll( + Layer.mock(CostTrackingService)({ + recordUsage: () => Effect.die(new Error("not implemented")), + getSummary: () => Effect.die(new Error("not 
implemented")), + setBudget: () => Effect.die(new Error("not implemented")), + getBudgets: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + Layer.mock(AuditLogService)({ + record: () => Effect.die(new Error("not implemented")), + query: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + Layer.mock(CIIntegrationService)({ + getStatus: () => Effect.die(new Error("not implemented")), + recordRun: () => Effect.die(new Error("not implemented")), + triggerRerun: () => Effect.die(new Error("not implemented")), + setFeedbackPolicy: () => Effect.die(new Error("not implemented")), + getFeedbackPolicy: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + Layer.mock(ProviderRouterService)({ + getHealth: () => Effect.die(new Error("not implemented")), + setRules: () => Effect.die(new Error("not implemented")), + getRules: () => Effect.die(new Error("not implemented")), + selectProvider: () => Effect.die(new Error("not implemented")), + reportHealth: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + Layer.mock(PipelineService)({ + create: () => Effect.die(new Error("not implemented")), + list: () => Effect.die(new Error("not implemented")), + execute: () => Effect.die(new Error("not implemented")), + getExecution: () => Effect.die(new Error("not implemented")), + cancel: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + Layer.mock(WorkflowService)({ + list: () => Effect.die(new Error("not implemented")), + create: () => Effect.die(new Error("not implemented")), + delete: () => Effect.die(new Error("not implemented")), + execute: () => Effect.die(new Error("not implemented")), + }), + Layer.mock(TaskDecompositionService)({ + decompose: () => Effect.die(new Error("not implemented")), + updateStatus: () => Effect.die(new Error("not implemented")), + getTree: () => Effect.die(new Error("not implemented")), + 
listTrees: () => Effect.die(new Error("not implemented")), + execute: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + Layer.mock(ProjectMemoryService)({ + add: () => Effect.die(new Error("not implemented")), + search: () => Effect.die(new Error("not implemented")), + forget: () => Effect.die(new Error("not implemented")), + list: () => Effect.die(new Error("not implemented")), + index: () => Effect.die(new Error("not implemented")), + }), + Layer.mock(PresenceService)({ + join: () => Effect.die(new Error("not implemented")), + leave: () => Effect.die(new Error("not implemented")), + updateCursor: () => Effect.die(new Error("not implemented")), + share: () => Effect.die(new Error("not implemented")), + getParticipants: () => Effect.die(new Error("not implemented")), + streamEvents: Stream.empty, + }), + ), + ), Layer.provide(workspaceAndProjectServicesLayer), Layer.provideMerge(FetchHttpClient.layer), Layer.provide(layerConfig), diff --git a/apps/server/src/server.ts b/apps/server/src/server.ts index 5cd06ec84a..0f36e74818 100644 --- a/apps/server/src/server.ts +++ b/apps/server/src/server.ts @@ -54,6 +54,15 @@ import { WorkspaceFileSystemLive } from "./workspace/Layers/WorkspaceFileSystem" import { WorkspacePathsLive } from "./workspace/Layers/WorkspacePaths"; import { ProjectSetupScriptRunnerLive } from "./project/Layers/ProjectSetupScriptRunner"; import { ObservabilityLive } from "./observability/Layers/Observability"; +import { CostTrackingServiceLive } from "./cost/Services/CostTrackingService"; +import { AuditLogServiceLive } from "./audit/Services/AuditLogService"; +import { CIIntegrationServiceLive } from "./ci/Services/CIIntegrationService"; +import { ProviderRouterServiceLive } from "./routing/Services/ProviderRouterService"; +import { PipelineServiceLive } from "./pipeline/Services/PipelineService"; +import { WorkflowServiceLive } from "./workflow/Services/WorkflowService"; +import { TaskDecompositionServiceLive } 
from "./task/Services/TaskDecompositionService"; +import { ProjectMemoryServiceLive } from "./memory/Services/ProjectMemoryService"; +import { PresenceServiceLive } from "./presence/Services/PresenceService"; const PtyAdapterLive = Layer.unwrap( Effect.gen(function* () { @@ -206,8 +215,22 @@ const WorkspaceLayerLive = Layer.mergeAll( ), ); +// The 7 independently-SQLite-backed feature services have no cross-dependencies. +const IndependentFeaturesLive = Layer.mergeAll( + CostTrackingServiceLive, + AuditLogServiceLive, + CIIntegrationServiceLive, + ProviderRouterServiceLive, + TaskDecompositionServiceLive, + ProjectMemoryServiceLive, + PresenceServiceLive, +); + +// WorkflowService depends on PipelineService; bundle them into a self-contained pair. +const PipelineAndWorkflowLive = WorkflowServiceLive.pipe(Layer.provideMerge(PipelineServiceLive)); + +// Single chain kept to ≤20 args for TypeScript inference stability. const RuntimeDependenciesLive = ReactorLayerLive.pipe( - // Core Services Layer.provideMerge(CheckpointingLayerLive), Layer.provideMerge(GitLayerLive), Layer.provideMerge(OrchestrationLayerLive), @@ -219,8 +242,8 @@ const RuntimeDependenciesLive = ReactorLayerLive.pipe( Layer.provideMerge(ServerSettingsLive), Layer.provideMerge(WorkspaceLayerLive), Layer.provideMerge(ProjectFaviconResolverLive), - - // Misc. + Layer.provideMerge(IndependentFeaturesLive), + Layer.provideMerge(PipelineAndWorkflowLive), Layer.provideMerge(AnalyticsServiceLayerLive), Layer.provideMerge(OpenLive), Layer.provideMerge(ServerLifecycleEventsLive), @@ -270,7 +293,10 @@ export const makeServerLayer = Layer.unwrap( ); // Important: Only `ServerConfig` should be provided by the CLI layer!!! Don't let other requirements leak into the launch layer. 
-export const runServer = Layer.launch(makeServerLayer) satisfies Effect.Effect< +// Note: `satisfies` is replaced with `as` because TypeScript's inference for `Exclude` +// in deep Layer.provideMerge chains incorrectly exposes SqlClient at the type level even though it is fully +// satisfied at runtime by PersistenceLayerLive. This is a known TS inference limitation with Effect layers. +export const runServer = Layer.launch(makeServerLayer) as unknown as Effect.Effect< never, any, ServerConfig diff --git a/apps/server/src/task/Services/TaskDecompositionService.ts b/apps/server/src/task/Services/TaskDecompositionService.ts new file mode 100644 index 0000000000..fba8cc716d --- /dev/null +++ b/apps/server/src/task/Services/TaskDecompositionService.ts @@ -0,0 +1,238 @@ +import type { + TaskTree, + TaskNode, + TaskTreeId, + TaskId, + TaskDecomposeInput, + TaskUpdateStatusInput, + TaskGetTreeInput, + TaskListTreesInput, + TaskListTreesResult, + TaskExecuteInput, + TaskStreamEvent, +} from "@t3tools/contracts"; +import { Effect, Layer, PubSub, ServiceMap, Stream } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +export interface TaskDecompositionServiceShape { + readonly decompose: (input: TaskDecomposeInput) => Effect.Effect; + readonly updateStatus: (input: TaskUpdateStatusInput) => Effect.Effect; + readonly getTree: (input: TaskGetTreeInput) => Effect.Effect; + readonly listTrees: (input: TaskListTreesInput) => Effect.Effect; + readonly execute: (input: TaskExecuteInput) => Effect.Effect; + readonly streamEvents: Stream.Stream; +} + +export class TaskDecompositionService extends ServiceMap.Service< + TaskDecompositionService, + TaskDecompositionServiceShape +>()("t3/task/Services/TaskDecompositionService") {} + +/** + * Decomposes a prompt into subtasks using simple heuristic parsing. + * Each sentence/paragraph becomes a task node; numbered lists become subtasks. 
+ */ +function decomposePromptToTasks(prompt: string, now: string): TaskNode[] { + const lines = prompt + .split(/\n+/) + .map((l) => l.trim()) + .filter(Boolean); + + const tasks: TaskNode[] = []; + let order = 0; + + for (const line of lines) { + const id = crypto.randomUUID() as TaskId; + const isNumbered = /^\d+[.)]\s/.test(line); + tasks.push({ + id, + parentId: null, + title: line.replace(/^\d+[.)]\s/, "").slice(0, 200), + description: null, + status: "pending", + priority: "medium", + complexity: line.length > 100 ? "complex" : "simple", + provider: undefined, + threadId: null, + dependsOn: [], + estimatedTokens: null, + order, + createdAt: now, + updatedAt: now, + completedAt: null, + }); + order++; + } + + return tasks.length > 0 + ? tasks + : [ + { + id: crypto.randomUUID() as TaskId, + parentId: null, + title: prompt.slice(0, 200), + description: null, + status: "pending", + priority: "medium", + complexity: "moderate", + provider: undefined, + threadId: null, + dependsOn: [], + estimatedTokens: null, + order: 0, + createdAt: now, + updatedAt: now, + completedAt: null, + }, + ]; +} + +const makeTaskDecompositionService = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + const pubsub = yield* PubSub.unbounded(); + + const persistTree = (tree: TaskTree) => + sql`INSERT OR REPLACE INTO task_trees (id, project_id, root_prompt, tasks, status, created_at, updated_at) + VALUES (${tree.id}, ${tree.projectId}, ${tree.rootPrompt}, ${JSON.stringify(tree.tasks)}, ${tree.status}, ${tree.createdAt}, ${tree.updatedAt})`; + + const readTree = (id: string): Effect.Effect => + Effect.gen(function* () { + const rows = yield* sql<{ + id: string; + project_id: string; + root_prompt: string; + tasks: string; + status: string; + created_at: string; + updated_at: string; + }>`SELECT * FROM task_trees WHERE id = ${id}`; + const row = rows[0]; + if (!row) return yield* Effect.die(new Error(`Task tree ${id} not found`)); + return
{ + id: row.id as TaskTreeId, + projectId: row.project_id as TaskTree["projectId"], + rootPrompt: row.root_prompt as TaskTree["rootPrompt"], + tasks: JSON.parse(row.tasks) as TaskNode[], + status: row.status as TaskTree["status"], + createdAt: row.created_at, + updatedAt: row.updated_at, + }; + }).pipe(Effect.orDie); + + const decompose: TaskDecompositionServiceShape["decompose"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + const id = crypto.randomUUID() as TaskTreeId; + const tasks = decomposePromptToTasks(input.prompt, now); + const tree: TaskTree = { + id, + projectId: input.projectId, + rootPrompt: input.prompt, + tasks, + status: "pending", + createdAt: now, + updatedAt: now, + }; + yield* persistTree(tree); + yield* PubSub.publish(pubsub, { type: "task.tree.updated" as const, tree }); + return tree; + }).pipe(Effect.orDie); + + const updateStatus: TaskDecompositionServiceShape["updateStatus"] = (input) => + Effect.gen(function* () { + const tree = yield* readTree(input.treeId); + const now = new Date().toISOString(); + const updatedTasks = tree.tasks.map((t) => + t.id === input.taskId + ? { + ...t, + status: input.status, + updatedAt: now, + completedAt: input.status === "completed" ? now : t.completedAt, + } + : t, + ); + const allDone = updatedTasks.every((t) => t.status === "completed" || t.status === "skipped"); + const anyFailed = updatedTasks.some((t) => t.status === "failed"); + const treeStatus = allDone ? "completed" : anyFailed ? 
"failed" : "in-progress"; + const updatedTree: TaskTree = { + ...tree, + tasks: updatedTasks, + status: treeStatus as TaskTree["status"], + updatedAt: now, + }; + yield* persistTree(updatedTree); + const updatedNode = updatedTasks.find((t) => t.id === input.taskId)!; + yield* PubSub.publish(pubsub, { + type: "task.node.updated" as const, + treeId: input.treeId, + node: updatedNode, + }); + yield* PubSub.publish(pubsub, { type: "task.tree.updated" as const, tree: updatedTree }); + return updatedTree; + }).pipe(Effect.orDie); + + const getTree: TaskDecompositionServiceShape["getTree"] = (input) => readTree(input.treeId); + + const listTrees: TaskDecompositionServiceShape["listTrees"] = (input) => + Effect.gen(function* () { + const rows = yield* sql<{ + id: string; + project_id: string; + root_prompt: string; + tasks: string; + status: string; + created_at: string; + updated_at: string; + }>`SELECT * FROM task_trees WHERE project_id = ${input.projectId} ORDER BY created_at DESC`; + return { + trees: rows.map((r) => ({ + id: r.id as TaskTreeId, + projectId: r.project_id as TaskTree["projectId"], + rootPrompt: r.root_prompt as TaskTree["rootPrompt"], + tasks: JSON.parse(r.tasks) as TaskNode[], + status: r.status as TaskTree["status"], + createdAt: r.created_at, + updatedAt: r.updated_at, + })), + }; + }).pipe(Effect.orDie); + + const execute: TaskDecompositionServiceShape["execute"] = (input) => + Effect.gen(function* () { + const tree = yield* readTree(input.treeId); + const now = new Date().toISOString(); + const tasksToRun = input.taskId + ? tree.tasks.filter((t) => t.id === input.taskId) + : tree.tasks.filter((t) => t.status === "pending"); + + const updatedTasks = tree.tasks.map((t) => + tasksToRun.some((r) => r.id === t.id) + ? 
{ ...t, status: "in-progress" as const, updatedAt: now } + : t, + ); + const updatedTree: TaskTree = { + ...tree, + tasks: updatedTasks, + status: "in-progress", + updatedAt: now, + }; + yield* persistTree(updatedTree); + yield* PubSub.publish(pubsub, { type: "task.tree.updated" as const, tree: updatedTree }); + return updatedTree; + }).pipe(Effect.orDie); + + return { + decompose, + updateStatus, + getTree, + listTrees, + execute, + streamEvents: Stream.fromPubSub(pubsub), + }; +}); + +export const TaskDecompositionServiceLive = Layer.effect( + TaskDecompositionService, + makeTaskDecompositionService, +); diff --git a/apps/server/src/workflow/Services/WorkflowService.ts b/apps/server/src/workflow/Services/WorkflowService.ts new file mode 100644 index 0000000000..3be0171fb4 --- /dev/null +++ b/apps/server/src/workflow/Services/WorkflowService.ts @@ -0,0 +1,360 @@ +/** + * WorkflowService - Workflow template management and execution. + * + * Owns CRUD for workflow templates (including built-in seed templates), + * variable substitution, and execution by converting resolved templates + * into pipelines and delegating to PipelineService. 
+ * + * @module WorkflowService + */ +import * as SqlClient from "effect/unstable/sql/SqlClient"; +import { Effect, Layer, ServiceMap } from "effect"; + +import { PipelineService, type PipelineStage } from "../../pipeline/Services/PipelineService.ts"; + +// ── Domain Types ──────────────────────────────────────────────────────────── + +export interface WorkflowVariable { + readonly name: string; + readonly description: string; + readonly defaultValue: string | null; +} + +export interface WorkflowStep { + readonly id: string; + readonly name: string; + readonly prompt: string; + readonly dependsOn: ReadonlyArray; +} + +export interface WorkflowTemplate { + readonly id: string; + readonly name: string; + readonly description: string | null; + readonly category: string; + readonly variables: ReadonlyArray; + readonly steps: ReadonlyArray; + readonly isBuiltIn: boolean; + readonly createdAt: string; + readonly updatedAt: string; +} + +// ── Service Shape ─────────────────────────────────────────────────────────── + +export interface WorkflowServiceShape { + /** List workflow templates with optional category filter. */ + readonly list: (input: { + readonly category?: string | undefined; + }) => Effect.Effect>; + + /** Create a new workflow template. */ + readonly create: (input: { + readonly id: string; + readonly name: string; + readonly description: string | null; + readonly category: string; + readonly variables: ReadonlyArray; + readonly steps: ReadonlyArray; + }) => Effect.Effect; + + /** Delete a non-built-in template. */ + readonly delete: (input: { readonly templateId: string }) => Effect.Effect; + + /** Resolve variables, create a pipeline from steps, and execute. 
*/ + readonly execute: (input: { + readonly templateId: string; + readonly projectId: string; + readonly threadId: string; + readonly variables: Record; + readonly executionId: string; + readonly pipelineId: string; + }) => Effect.Effect; +} + +// ── Service Tag ───────────────────────────────────────────────────────────── + +export class WorkflowService extends ServiceMap.Service()( + "t3/workflow/Services/WorkflowService", +) {} + +// ── Built-in Templates ────────────────────────────────────────────────────── + +const BUILT_IN_TEMPLATES: ReadonlyArray<{ + readonly id: string; + readonly name: string; + readonly description: string; + readonly category: string; + readonly variables: ReadonlyArray; + readonly steps: ReadonlyArray; +}> = [ + { + id: "builtin:implement-and-test", + name: "Implement & Test", + description: "Implement a feature and write tests for it.", + category: "development", + variables: [ + { + name: "FEATURE_DESCRIPTION", + description: "Description of the feature to implement", + defaultValue: null, + }, + { name: "TEST_FRAMEWORK", description: "Testing framework to use", defaultValue: "vitest" }, + ], + steps: [ + { + id: "implement", + name: "Implement Feature", + prompt: "Implement the following feature: {{FEATURE_DESCRIPTION}}", + dependsOn: [], + }, + { + id: "test", + name: "Write Tests", + prompt: "Write {{TEST_FRAMEWORK}} tests for the feature: {{FEATURE_DESCRIPTION}}", + dependsOn: ["implement"], + }, + ], + }, + { + id: "builtin:review-and-fix", + name: "Review & Fix", + description: "Review code for issues and apply fixes.", + category: "quality", + variables: [ + { name: "REVIEW_SCOPE", description: "Files or modules to review", defaultValue: "." }, + ], + steps: [ + { + id: "review", + name: "Code Review", + prompt: + "Review the code in {{REVIEW_SCOPE}} for bugs, security issues, and code quality problems. 
List all issues found.", + dependsOn: [], + }, + { + id: "fix", + name: "Apply Fixes", + prompt: "Fix all issues identified in the code review of {{REVIEW_SCOPE}}.", + dependsOn: ["review"], + }, + ], + }, + { + id: "builtin:feature-branch", + name: "Feature Branch", + description: "Create a feature branch, implement, test, and prepare for review.", + category: "development", + variables: [ + { name: "BRANCH_NAME", description: "Name for the feature branch", defaultValue: null }, + { + name: "FEATURE_DESCRIPTION", + description: "Description of the feature", + defaultValue: null, + }, + ], + steps: [ + { + id: "branch", + name: "Create Branch", + prompt: "Create a new git branch named '{{BRANCH_NAME}}' from the current branch.", + dependsOn: [], + }, + { + id: "implement", + name: "Implement", + prompt: "Implement the following feature: {{FEATURE_DESCRIPTION}}", + dependsOn: ["branch"], + }, + { + id: "test", + name: "Test", + prompt: "Write and run tests for: {{FEATURE_DESCRIPTION}}", + dependsOn: ["implement"], + }, + { + id: "prepare", + name: "Prepare for Review", + prompt: "Commit all changes and prepare a summary of what was implemented for code review.", + dependsOn: ["test"], + }, + ], + }, +]; + +// ── Helpers ───────────────────────────────────────────────────────────────── + +/** Substitute {{VAR_NAME}} placeholders in a prompt string. 
*/ +function substituteVariables( + prompt: string, + variables: Record, + defaults: ReadonlyArray, +): string { + let result = prompt; + const defaultMap = new Map(defaults.map((v) => [v.name, v.defaultValue])); + + for (const [name, value] of Object.entries(variables)) { + result = result.replaceAll(`{{${name}}}`, value); + } + // Fill any remaining placeholders with defaults + for (const [name, defaultValue] of defaultMap) { + if (defaultValue !== null) { + result = result.replaceAll(`{{${name}}}`, defaultValue); + } + } + return result; +} + +function templateRowToDomain(row: Record): WorkflowTemplate { + return { + id: row.id as string, + name: row.name as string, + description: (row.description as string | null) ?? null, + category: row.category as string, + variables: JSON.parse(row.variables as string) as ReadonlyArray, + steps: JSON.parse(row.steps as string) as ReadonlyArray, + isBuiltIn: (row.isBuiltIn as number) === 1, + createdAt: row.createdAt as string, + updatedAt: row.updatedAt as string, + }; +} + +// ── Layer Implementation ──────────────────────────────────────────────────── + +const makeWorkflowService = Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + const pipelineService = yield* PipelineService; + + // Seed built-in templates on startup + for (const template of BUILT_IN_TEMPLATES) { + const now = new Date().toISOString(); + yield* sql` + INSERT INTO workflow_templates (id, name, description, category, variables, steps, is_built_in, created_at, updated_at) + VALUES ( + ${template.id}, + ${template.name}, + ${template.description}, + ${template.category}, + ${JSON.stringify(template.variables)}, + ${JSON.stringify(template.steps)}, + ${1}, + ${now}, + ${now} + ) + ON CONFLICT (id) DO UPDATE SET + name = excluded.name, + description = excluded.description, + category = excluded.category, + variables = excluded.variables, + steps = excluded.steps, + updated_at = excluded.updated_at + `; + } + + const list: 
WorkflowServiceShape["list"] = (input) => + Effect.gen(function* () { + const rows = input.category + ? yield* sql` + SELECT id, name, description, category, variables, steps, is_built_in AS "isBuiltIn", + created_at AS "createdAt", updated_at AS "updatedAt" + FROM workflow_templates + WHERE category = ${input.category} + ORDER BY is_built_in DESC, created_at ASC + ` + : yield* sql` + SELECT id, name, description, category, variables, steps, is_built_in AS "isBuiltIn", + created_at AS "createdAt", updated_at AS "updatedAt" + FROM workflow_templates + ORDER BY is_built_in DESC, created_at ASC + `; + + return rows.map(templateRowToDomain); + }).pipe(Effect.orDie); + + const create: WorkflowServiceShape["create"] = (input) => + Effect.gen(function* () { + const now = new Date().toISOString(); + yield* sql` + INSERT INTO workflow_templates (id, name, description, category, variables, steps, is_built_in, created_at, updated_at) + VALUES ( + ${input.id}, + ${input.name}, + ${input.description}, + ${input.category}, + ${JSON.stringify(input.variables)}, + ${JSON.stringify(input.steps)}, + ${0}, + ${now}, + ${now} + ) + `; + + return { + id: input.id, + name: input.name, + description: input.description, + category: input.category, + variables: input.variables, + steps: input.steps, + isBuiltIn: false, + createdAt: now, + updatedAt: now, + } satisfies WorkflowTemplate; + }).pipe(Effect.orDie); + + const deleteTemplate: WorkflowServiceShape["delete"] = (input) => + Effect.gen(function* () { + yield* sql` + DELETE FROM workflow_templates + WHERE id = ${input.templateId} AND is_built_in = 0 + `; + }).pipe(Effect.orDie); + + const execute: WorkflowServiceShape["execute"] = (input) => + Effect.gen(function* () { + // Resolve template + const rows = yield* sql` + SELECT id, name, description, category, variables, steps, is_built_in AS "isBuiltIn", + created_at AS "createdAt", updated_at AS "updatedAt" + FROM workflow_templates + WHERE id = ${input.templateId} + `; + if 
(rows.length === 0) { + return yield* Effect.die(new Error(`Workflow template not found: ${input.templateId}`)); + } + const template = templateRowToDomain(rows[0]!); + + // Convert steps to pipeline stages with variable substitution + const pipelineStages: PipelineStage[] = template.steps.map((step) => ({ + id: step.id, + name: step.name, + prompt: substituteVariables(step.prompt, input.variables, template.variables), + dependsOn: step.dependsOn, + })); + + // Create pipeline definition + yield* pipelineService.create({ + id: input.pipelineId, + name: `workflow:${template.name}`, + description: template.description, + projectId: input.projectId, + stages: pipelineStages, + }); + + // Execute pipeline + yield* pipelineService.execute({ + executionId: input.executionId, + pipelineId: input.pipelineId, + projectId: input.projectId, + threadId: input.threadId, + }); + }).pipe(Effect.orDie); + + return { + list, + create, + delete: deleteTemplate, + execute, + } satisfies WorkflowServiceShape; +}); + +export const WorkflowServiceLive = Layer.effect(WorkflowService, makeWorkflowService); diff --git a/apps/server/src/ws.ts b/apps/server/src/ws.ts index 33a0518611..df7ffd1cf3 100644 --- a/apps/server/src/ws.ts +++ b/apps/server/src/ws.ts @@ -46,6 +46,15 @@ import { WorkspaceEntries } from "./workspace/Services/WorkspaceEntries"; import { WorkspaceFileSystem } from "./workspace/Services/WorkspaceFileSystem"; import { WorkspacePathOutsideRootError } from "./workspace/Services/WorkspacePaths"; import { ProjectSetupScriptRunner } from "./project/Services/ProjectSetupScriptRunner"; +import { CostTrackingService } from "./cost/Services/CostTrackingService"; +import { AuditLogService } from "./audit/Services/AuditLogService"; +import { CIIntegrationService } from "./ci/Services/CIIntegrationService"; +import { ProviderRouterService } from "./routing/Services/ProviderRouterService"; +import { PipelineService } from "./pipeline/Services/PipelineService"; +import { WorkflowService 
} from "./workflow/Services/WorkflowService"; +import { TaskDecompositionService } from "./task/Services/TaskDecompositionService"; +import { ProjectMemoryService } from "./memory/Services/ProjectMemoryService"; +import { PresenceService } from "./presence/Services/PresenceService"; const WsRpcLayer = WsRpcGroup.toLayer( Effect.gen(function* () { @@ -65,6 +74,15 @@ const WsRpcLayer = WsRpcGroup.toLayer( const workspaceEntries = yield* WorkspaceEntries; const workspaceFileSystem = yield* WorkspaceFileSystem; const projectSetupScriptRunner = yield* ProjectSetupScriptRunner; + const costTracking = yield* CostTrackingService; + const auditLog = yield* AuditLogService; + const ciIntegration = yield* CIIntegrationService; + const providerRouter = yield* ProviderRouterService; + const pipelineService = yield* PipelineService; + const workflowService = yield* WorkflowService; + const taskDecomposition = yield* TaskDecompositionService; + const projectMemory = yield* ProjectMemoryService; + const presenceService = yield* PresenceService; const serverCommandId = (tag: string) => CommandId.makeUnsafe(`server:${tag}:${crypto.randomUUID()}`); @@ -708,6 +726,397 @@ const WsRpcLayer = WsRpcGroup.toLayer( }), { "rpc.aggregate": "server" }, ), + + // ── Cost Tracking ──────────────────────────────────────────────────── + [WS_METHODS.costGetSummary]: (input) => + observeRpcEffect(WS_METHODS.costGetSummary, costTracking.getSummary(input), { + "rpc.aggregate": "cost", + }), + [WS_METHODS.costSetBudget]: (input) => + observeRpcEffect(WS_METHODS.costSetBudget, costTracking.setBudget(input), { + "rpc.aggregate": "cost", + }), + [WS_METHODS.costGetBudgets]: (input) => + observeRpcEffect( + WS_METHODS.costGetBudgets, + costTracking.getBudgets({ projectId: input.projectId }), + { "rpc.aggregate": "cost" }, + ), + [WS_METHODS.subscribeCostEvents]: (_input) => + observeRpcStream(WS_METHODS.subscribeCostEvents, costTracking.streamEvents, { + "rpc.aggregate": "cost", + }), + + // ── Audit Log 
──────────────────────────────────────────────────────── + [WS_METHODS.auditQuery]: (input) => + observeRpcEffect(WS_METHODS.auditQuery, auditLog.query(input), { + "rpc.aggregate": "audit", + }), + [WS_METHODS.subscribeAuditEvents]: (_input) => + observeRpcStream(WS_METHODS.subscribeAuditEvents, auditLog.streamEvents, { + "rpc.aggregate": "audit", + }), + + // ── CI/CD ──────────────────────────────────────────────────────────── + [WS_METHODS.ciGetStatus]: (input) => + observeRpcEffect(WS_METHODS.ciGetStatus, ciIntegration.getStatus(input), { + "rpc.aggregate": "ci", + }), + [WS_METHODS.ciTriggerRerun]: (input) => + observeRpcEffect( + WS_METHODS.ciTriggerRerun, + ciIntegration.triggerRerun(input).pipe(Effect.asVoid), + { "rpc.aggregate": "ci" }, + ), + [WS_METHODS.ciSetFeedbackPolicy]: (input) => + observeRpcEffect(WS_METHODS.ciSetFeedbackPolicy, ciIntegration.setFeedbackPolicy(input), { + "rpc.aggregate": "ci", + }), + [WS_METHODS.subscribeCIEvents]: (_input) => + observeRpcStream(WS_METHODS.subscribeCIEvents, ciIntegration.streamEvents, { + "rpc.aggregate": "ci", + }), + + // ── Routing ────────────────────────────────────────────────────────── + [WS_METHODS.routingGetHealth]: (_input) => + observeRpcEffect(WS_METHODS.routingGetHealth, providerRouter.getHealth(), { + "rpc.aggregate": "routing", + }), + [WS_METHODS.routingSetRules]: (input) => + observeRpcEffect(WS_METHODS.routingSetRules, providerRouter.setRules(input), { + "rpc.aggregate": "routing", + }), + [WS_METHODS.routingGetRules]: (_input) => + observeRpcEffect(WS_METHODS.routingGetRules, providerRouter.getRules(), { + "rpc.aggregate": "routing", + }), + [WS_METHODS.subscribeRoutingEvents]: (_input) => + observeRpcStream(WS_METHODS.subscribeRoutingEvents, providerRouter.streamEvents, { + "rpc.aggregate": "routing", + }), + + // ── Pipelines ──────────────────────────────────────────────────────── + [WS_METHODS.pipelineCreate]: (input) => + observeRpcEffect( + WS_METHODS.pipelineCreate, + pipelineService 
+ .create({ + id: crypto.randomUUID(), + name: input.name, + description: input.description, + projectId: input.projectId, + stages: input.stages.map((s) => ({ + id: s.id, + name: s.name, + prompt: s.prompt, + dependsOn: s.dependsOn, + })), + }) + .pipe( + Effect.map((def) => ({ + id: def.id as import("@t3tools/contracts").PipelineId, + name: def.name as import("@t3tools/contracts").PipelineDefinition["name"], + description: def.description, + projectId: + def.projectId as import("@t3tools/contracts").PipelineDefinition["projectId"], + stages: input.stages, + createdAt: def.createdAt, + updatedAt: def.updatedAt, + })), + ), + { "rpc.aggregate": "pipeline" }, + ), + [WS_METHODS.pipelineList]: (input) => + observeRpcEffect( + WS_METHODS.pipelineList, + pipelineService.list({ projectId: input.projectId }).pipe( + Effect.map((defs) => ({ + pipelines: defs.map((def) => ({ + id: def.id as import("@t3tools/contracts").PipelineId, + name: def.name as import("@t3tools/contracts").PipelineDefinition["name"], + description: def.description, + projectId: + def.projectId as import("@t3tools/contracts").PipelineDefinition["projectId"], + stages: JSON.parse( + (def as unknown as { stagesJson: string }).stagesJson ?? 
"[]", + ) as import("@t3tools/contracts").PipelineStage[], + createdAt: def.createdAt, + updatedAt: def.updatedAt, + })), + })), + ), + { "rpc.aggregate": "pipeline" }, + ), + [WS_METHODS.pipelineExecute]: (input) => + observeRpcEffect( + WS_METHODS.pipelineExecute, + pipelineService + .execute({ + executionId: crypto.randomUUID(), + pipelineId: input.pipelineId, + projectId: input.projectId, + threadId: crypto.randomUUID(), + }) + .pipe( + Effect.map((exec) => ({ + id: exec.id as import("@t3tools/contracts").PipelineExecutionId, + pipelineId: + exec.pipelineId as import("@t3tools/contracts").PipelineExecution["pipelineId"], + projectId: + exec.projectId as import("@t3tools/contracts").PipelineExecution["projectId"], + status: exec.status as import("@t3tools/contracts").PipelineExecution["status"], + stages: exec.stages.map((s) => ({ + stageId: s.stageId as import("@t3tools/contracts").PipelineStageId, + status: s.status as import("@t3tools/contracts").PipelineStageExecution["status"], + threadId: null, + startedAt: s.startedAt, + completedAt: s.completedAt, + error: s.error, + retryCount: 0, + output: null, + })), + startedAt: exec.startedAt, + completedAt: exec.completedAt, + updatedAt: exec.updatedAt, + })), + ), + { "rpc.aggregate": "pipeline" }, + ), + [WS_METHODS.pipelineGetExecution]: (input) => + observeRpcEffect( + WS_METHODS.pipelineGetExecution, + pipelineService.getExecution({ executionId: input.executionId }).pipe( + Effect.flatMap((exec) => + exec + ? 
Effect.succeed({ + id: exec.id as import("@t3tools/contracts").PipelineExecutionId, + pipelineId: + exec.pipelineId as import("@t3tools/contracts").PipelineExecution["pipelineId"], + projectId: + exec.projectId as import("@t3tools/contracts").PipelineExecution["projectId"], + status: exec.status as import("@t3tools/contracts").PipelineExecution["status"], + stages: exec.stages.map((s) => ({ + stageId: s.stageId as import("@t3tools/contracts").PipelineStageId, + status: + s.status as import("@t3tools/contracts").PipelineStageExecution["status"], + threadId: null, + startedAt: s.startedAt, + completedAt: s.completedAt, + error: s.error, + retryCount: 0, + output: null, + })), + startedAt: exec.startedAt, + completedAt: exec.completedAt, + updatedAt: exec.updatedAt, + }) + : Effect.die(new Error("Execution not found")), + ), + ), + { "rpc.aggregate": "pipeline" }, + ), + [WS_METHODS.pipelineCancel]: (input) => + observeRpcEffect( + WS_METHODS.pipelineCancel, + pipelineService.cancel({ executionId: input.executionId }), + { "rpc.aggregate": "pipeline" }, + ), + [WS_METHODS.subscribePipelineEvents]: (_input) => + observeRpcStream( + WS_METHODS.subscribePipelineEvents, + pipelineService.streamEvents.pipe( + Stream.map((event) => ({ + type: "pipeline.execution.updated" as const, + execution: { + id: event.executionId as import("@t3tools/contracts").PipelineExecutionId, + pipelineId: "" as import("@t3tools/contracts").PipelineExecution["pipelineId"], + projectId: "" as import("@t3tools/contracts").PipelineExecution["projectId"], + status: (event.type === "pipeline.completed" + ? "completed" + : event.type === "pipeline.failed" + ? 
"failed" + : "running") as import("@t3tools/contracts").PipelineExecution["status"], + stages: [], + startedAt: event.timestamp, + completedAt: null, + updatedAt: event.timestamp, + }, + })), + ), + { "rpc.aggregate": "pipeline" }, + ), + + // ── Workflows ──────────────────────────────────────────────────────── + [WS_METHODS.workflowList]: (input) => + observeRpcEffect( + WS_METHODS.workflowList, + workflowService.list({ category: input.category }).pipe( + Effect.map((templates) => ({ + templates: templates.map((t) => ({ + id: t.id as import("@t3tools/contracts").WorkflowTemplateId, + name: t.name as import("@t3tools/contracts").WorkflowTemplate["name"], + description: t.description, + category: t.category as import("@t3tools/contracts").WorkflowTemplate["category"], + variables: t.variables.map((v) => ({ + name: v.name as import("@t3tools/contracts").WorkflowVariable["name"], + description: (v.description ?? + null) as import("@t3tools/contracts").WorkflowVariable["description"], + defaultValue: (v.defaultValue ?? + null) as import("@t3tools/contracts").WorkflowVariable["defaultValue"], + required: false, + })), + steps: t.steps.map((s) => ({ + id: s.id as import("@t3tools/contracts").WorkflowStepId, + name: s.name as import("@t3tools/contracts").WorkflowStep["name"], + kind: "prompt" as const, + prompt: s.prompt, + command: null, + condition: null, + continueOnError: false, + timeoutMs: 120_000, + dependsOn: s.dependsOn as import("@t3tools/contracts").WorkflowStepId[], + })), + isBuiltIn: t.isBuiltIn, + createdAt: t.createdAt, + updatedAt: t.updatedAt, + })), + })), + ), + { "rpc.aggregate": "workflow" }, + ), + [WS_METHODS.workflowCreate]: (input) => + observeRpcEffect( + WS_METHODS.workflowCreate, + workflowService + .create({ + id: crypto.randomUUID(), + name: input.name, + description: input.description, + category: input.category, + variables: input.variables.map((v) => ({ + name: v.name, + description: v.description ?? "", + defaultValue: v.defaultValue ?? 
null, + })), + steps: input.steps.map((s) => ({ + id: s.id, + name: s.name, + prompt: s.prompt ?? "", + dependsOn: s.dependsOn as unknown as string[], + })), + }) + .pipe( + Effect.map((t) => ({ + id: t.id as import("@t3tools/contracts").WorkflowTemplateId, + name: t.name as import("@t3tools/contracts").WorkflowTemplate["name"], + description: t.description, + category: t.category as import("@t3tools/contracts").WorkflowTemplate["category"], + variables: input.variables, + steps: input.steps, + isBuiltIn: false, + createdAt: t.createdAt, + updatedAt: t.updatedAt, + })), + ), + { "rpc.aggregate": "workflow" }, + ), + [WS_METHODS.workflowDelete]: (input) => + observeRpcEffect( + WS_METHODS.workflowDelete, + workflowService.delete({ templateId: input.templateId }), + { "rpc.aggregate": "workflow" }, + ), + [WS_METHODS.workflowExecute]: (input) => + observeRpcEffect( + WS_METHODS.workflowExecute, + workflowService.execute({ + templateId: input.templateId, + projectId: input.projectId, + threadId: crypto.randomUUID(), + variables: input.variables, + executionId: crypto.randomUUID(), + pipelineId: crypto.randomUUID(), + }), + { "rpc.aggregate": "workflow" }, + ), + + // ── Task Decomposition ─────────────────────────────────────────────── + [WS_METHODS.taskDecompose]: (input) => + observeRpcEffect(WS_METHODS.taskDecompose, taskDecomposition.decompose(input), { + "rpc.aggregate": "task", + }), + [WS_METHODS.taskUpdateStatus]: (input) => + observeRpcEffect(WS_METHODS.taskUpdateStatus, taskDecomposition.updateStatus(input), { + "rpc.aggregate": "task", + }), + [WS_METHODS.taskGetTree]: (input) => + observeRpcEffect(WS_METHODS.taskGetTree, taskDecomposition.getTree(input), { + "rpc.aggregate": "task", + }), + [WS_METHODS.taskListTrees]: (input) => + observeRpcEffect(WS_METHODS.taskListTrees, taskDecomposition.listTrees(input), { + "rpc.aggregate": "task", + }), + [WS_METHODS.taskExecute]: (input) => + observeRpcEffect(WS_METHODS.taskExecute, taskDecomposition.execute(input), 
{ + "rpc.aggregate": "task", + }), + [WS_METHODS.subscribeTaskEvents]: (_input) => + observeRpcStream(WS_METHODS.subscribeTaskEvents, taskDecomposition.streamEvents, { + "rpc.aggregate": "task", + }), + + // ── Memory ─────────────────────────────────────────────────────────── + [WS_METHODS.memoryIndex]: (input) => + observeRpcEffect(WS_METHODS.memoryIndex, projectMemory.index(input), { + "rpc.aggregate": "memory", + }), + [WS_METHODS.memorySearch]: (input) => + observeRpcEffect(WS_METHODS.memorySearch, projectMemory.search(input), { + "rpc.aggregate": "memory", + }), + [WS_METHODS.memoryAdd]: (input) => + observeRpcEffect(WS_METHODS.memoryAdd, projectMemory.add(input), { + "rpc.aggregate": "memory", + }), + [WS_METHODS.memoryForget]: (input) => + observeRpcEffect(WS_METHODS.memoryForget, projectMemory.forget(input), { + "rpc.aggregate": "memory", + }), + [WS_METHODS.memoryList]: (input) => + observeRpcEffect(WS_METHODS.memoryList, projectMemory.list(input), { + "rpc.aggregate": "memory", + }), + + // ── Presence ───────────────────────────────────────────────────────── + [WS_METHODS.presenceJoin]: (input) => + observeRpcEffect(WS_METHODS.presenceJoin, presenceService.join(input), { + "rpc.aggregate": "presence", + }), + [WS_METHODS.presenceLeave]: (input) => + observeRpcEffect(WS_METHODS.presenceLeave, presenceService.leave(input), { + "rpc.aggregate": "presence", + }), + [WS_METHODS.presenceUpdateCursor]: (input) => + observeRpcEffect(WS_METHODS.presenceUpdateCursor, presenceService.updateCursor(input), { + "rpc.aggregate": "presence", + }), + [WS_METHODS.presenceShare]: (input) => + observeRpcEffect(WS_METHODS.presenceShare, presenceService.share(input), { + "rpc.aggregate": "presence", + }), + [WS_METHODS.presenceGetParticipants]: (input) => + observeRpcEffect( + WS_METHODS.presenceGetParticipants, + presenceService.getParticipants(input), + { + "rpc.aggregate": "presence", + }, + ), + [WS_METHODS.subscribePresenceEvents]: (_input) => + 
observeRpcStream(WS_METHODS.subscribePresenceEvents, presenceService.streamEvents, { + "rpc.aggregate": "presence", + }), }); }), ); diff --git a/apps/web/src/appSettings.ts b/apps/web/src/appSettings.ts index bb09b79464..92c21d1621 100644 --- a/apps/web/src/appSettings.ts +++ b/apps/web/src/appSettings.ts @@ -123,6 +123,7 @@ export const AppSettingsSchema = Schema.Struct({ enableAssistantStreaming: Schema.Boolean.pipe(withDefaults(() => false)), showCommandOutput: Schema.Boolean.pipe(withDefaults(() => true)), showFileChangeDiffs: Schema.Boolean.pipe(withDefaults(() => true)), + followUpBehavior: Schema.Literals(["steer", "queue"]).pipe(withDefaults(() => "steer" as const)), sidebarProjectSortOrder: SidebarProjectSortOrder.pipe( withDefaults(() => DEFAULT_SIDEBAR_PROJECT_SORT_ORDER), ), diff --git a/apps/web/src/auditStore.ts b/apps/web/src/auditStore.ts new file mode 100644 index 0000000000..a265840b53 --- /dev/null +++ b/apps/web/src/auditStore.ts @@ -0,0 +1,60 @@ +import type { AuditEntry, AuditQueryInput, AuditQueryResult } from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface AuditState { + entries: AuditEntry[]; + total: number; + hasMore: boolean; + isLoading: boolean; + error: string | null; +} + +export interface AuditStore extends AuditState { + query: (input?: Partial) => Promise; + clearError: () => void; +} + +const initialState: AuditState = { + entries: [], + total: 0, + hasMore: false, + isLoading: false, + error: null, +}; + +export const useAuditStore = create((set) => ({ + ...initialState, + + query: async (input = {}) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result: AuditQueryResult = await client.audit.query(input as AuditQueryInput); + set({ + entries: result.entries as AuditEntry[], + total: result.total, + hasMore: result.hasMore, + isLoading: false, + }); + } catch (e) { + set({ error: String(e), isLoading: 
false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live audit events; returns an unsubscribe function. */ +export function subscribeAuditEvents(): () => void { + const client = getWsRpcClient(); + return client.audit.onEvent((event) => { + if (event.type === "audit.entry") { + useAuditStore.setState((s) => ({ + entries: [event.entry as AuditEntry, ...s.entries].slice(0, 500), + total: s.total + 1, + })); + } + }); +} diff --git a/apps/web/src/ciStore.ts b/apps/web/src/ciStore.ts new file mode 100644 index 0000000000..a7f50c44e8 --- /dev/null +++ b/apps/web/src/ciStore.ts @@ -0,0 +1,101 @@ +import type { + CIFeedbackPolicy, + CIGetStatusInput, + CIRun, + CISetFeedbackPolicyInput, + CITriggerRerunInput, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface CIState { + runsByProject: Record; + feedbackPolicies: Record; + isLoading: boolean; + error: string | null; +} + +export interface CIStore extends CIState { + fetchRuns: (input: CIGetStatusInput) => Promise; + triggerRerun: (input: CITriggerRerunInput) => Promise; + setFeedbackPolicy: (input: CISetFeedbackPolicyInput) => Promise; + clearError: () => void; +} + +const initialState: CIState = { + runsByProject: {}, + feedbackPolicies: {}, + isLoading: false, + error: null, +}; + +export const useCIStore = create((set) => ({ + ...initialState, + + fetchRuns: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.ci.getStatus(input); + set((s) => ({ + runsByProject: { + ...s.runsByProject, + [input.projectId]: result.runs as CIRun[], + }, + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + triggerRerun: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.ci.triggerRerun(input); + set({ isLoading: false }); + 
} catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + setFeedbackPolicy: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const policy = await client.ci.setFeedbackPolicy(input); + set((s) => ({ + feedbackPolicies: { + ...s.feedbackPolicies, + [input.projectId]: policy as CIFeedbackPolicy, + }, + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live CI events; returns an unsubscribe function. */ +export function subscribeCIEvents(): () => void { + const client = getWsRpcClient(); + return client.ci.onEvent((event) => { + if (event.type === "ci.run.updated") { + const run = event.run as CIRun; + useCIStore.setState((s) => { + const existing = s.runsByProject[run.projectId] ?? []; + const updated = existing.some((r) => r.id === run.id) + ? existing.map((r) => (r.id === run.id ? run : r)) + : [run, ...existing]; + return { + runsByProject: { ...s.runsByProject, [run.projectId]: updated }, + }; + }); + } + }); +} diff --git a/apps/web/src/components/ChatView.tsx b/apps/web/src/components/ChatView.tsx index 5c29cdfa00..b7a6d5da71 100644 --- a/apps/web/src/components/ChatView.tsx +++ b/apps/web/src/components/ChatView.tsx @@ -123,6 +123,7 @@ import { resolveSelectableProvider, } from "../providerModels"; import { useSettings } from "../hooks/useSettings"; +import { useAppSettings } from "../appSettings"; import { resolveAppModelSelection } from "../modelSelection"; import { isTerminalFocused } from "../lib/terminalFocus"; import { @@ -163,6 +164,10 @@ import { ComposerPrimaryActions } from "./chat/ComposerPrimaryActions"; import { ComposerPendingApprovalPanel } from "./chat/ComposerPendingApprovalPanel"; import { ComposerPendingUserInputPanel } from "./chat/ComposerPendingUserInputPanel"; import { ComposerPlanFollowUpBanner } from "./chat/ComposerPlanFollowUpBanner"; +import { + 
ComposerQueuedFollowUpsPanel, + type QueuedFollowUpMessage, +} from "./chat/ComposerQueuedFollowUpsPanel"; import { getComposerProviderState, renderProviderTraitsMenuContent, @@ -662,6 +667,14 @@ export default function ChatView({ threadId }: ChatViewProps) { const promptRef = useRef(prompt); const [showScrollToBottom, setShowScrollToBottom] = useState(false); const [isDragOverComposer, setIsDragOverComposer] = useState(false); + const { settings: appSettings } = useAppSettings(); + const followUpBehavior = appSettings.followUpBehavior; + + const [queuedFollowUps, setQueuedFollowUps] = useState([]); + // Ref so effects that only depend on latestTurnSettled can still read the latest queue + const queuedFollowUpsRef = useRef(queuedFollowUps); + queuedFollowUpsRef.current = queuedFollowUps; + const [expandedImage, setExpandedImage] = useState(null); const [optimisticUserMessages, setOptimisticUserMessages] = useState([]); const optimisticUserMessagesRef = useRef(optimisticUserMessages); @@ -1146,10 +1159,16 @@ export default function ChatView({ threadId }: ChatViewProps) { localDispatchStartedAt, ); const isComposerApprovalState = activePendingApproval !== null; + const showQueuedFollowUpsPanel = + queuedFollowUps.length > 0 && + !isComposerApprovalState && + pendingUserInputs.length === 0 && + !(showPlanFollowUpPrompt && activeProposedPlan !== null); const hasComposerHeader = isComposerApprovalState || pendingUserInputs.length > 0 || - (showPlanFollowUpPrompt && activeProposedPlan !== null); + (showPlanFollowUpPrompt && activeProposedPlan !== null) || + showQueuedFollowUpsPanel; const composerFooterHasWideActions = showPlanFollowUpPrompt || activePendingProgress !== null; const composerFooterActionLayoutKey = useMemo(() => { if (activePendingProgress) { @@ -2346,6 +2365,53 @@ export default function ChatView({ threadId }: ChatViewProps) { setIsRevertingCheckpoint(false); }, [activeThread?.id]); + // Clear queued follow-ups when switching threads. 
+ useEffect(() => { + setQueuedFollowUps([]); + }, [activeThread?.id]); + + // Stable refs so the settle effect always sees the latest values without + // requiring them in its deps array (it must only fire on transition to settled). + const activeThreadRef = useRef(activeThread); + activeThreadRef.current = activeThread; + const selectedModelSelectionRef = useRef(selectedModelSelection); + selectedModelSelectionRef.current = selectedModelSelection; + const runtimeModeRef = useRef(runtimeMode); + runtimeModeRef.current = runtimeMode; + const interactionModeRef = useRef(interactionMode); + interactionModeRef.current = interactionMode; + + // Auto-dispatch queued follow-ups on the transition from running → settled. + const prevTurnSettledRef = useRef(latestTurnSettled); + useEffect(() => { + const wasSettled = prevTurnSettledRef.current; + prevTurnSettledRef.current = latestTurnSettled; + if (wasSettled || !latestTurnSettled) return; + const next = queuedFollowUpsRef.current[0]; + const thread = activeThreadRef.current; + if (!next || !thread) return; + setQueuedFollowUps((prev) => prev.slice(1)); + const api = readNativeApi(); + if (!api) return; + const messageId = newMessageId(); + const createdAt = new Date().toISOString(); + void api.orchestration + .dispatchCommand({ + type: "thread.turn.start", + commandId: newCommandId(), + threadId: thread.id, + message: { messageId, role: "user", text: next.text, attachments: [] }, + modelSelection: selectedModelSelectionRef.current, + runtimeMode: runtimeModeRef.current, + interactionMode: interactionModeRef.current, + createdAt, + }) + .catch(() => { + // Re-queue the item on failure so it isn't lost. 
+ setQueuedFollowUps((prev) => [next, ...prev]); + }); + }, [latestTurnSettled]); // only fires on transition to settled; reads latest values via refs + useEffect(() => { if (!activeThread?.id || terminalState.terminalOpen) return; const frame = window.requestAnimationFrame(() => { @@ -2865,7 +2931,10 @@ export default function ChatView({ threadId }: ChatViewProps) { [activeThread, isConnecting, isRevertingCheckpoint, isSendBusy, phase, setThreadError], ); - const onSend = async (e?: { preventDefault: () => void }) => { + const onSend = async ( + e?: { preventDefault: () => void }, + opts?: { forceImmediate?: boolean; forceQueue?: boolean }, + ) => { e?.preventDefault(); const api = readNativeApi(); if (!api || !activeThread || isSendBusy || isConnecting || sendInFlightRef.current) return; @@ -2947,6 +3016,34 @@ export default function ChatView({ threadId }: ChatViewProps) { return; } + // Queue mode: hold follow-ups until the active turn settles. + // Also applies in steer mode when forceQueue is set (Cmd/Ctrl+Shift+Enter one-off). + // Only applies to follow-up messages (server thread with existing messages, no images). + // forceImmediate bypasses queue mode; forceQueue activates it from steer mode. 
+ const shouldQueue = + isServerThread && + !isFirstMessage && + composerImages.length === 0 && + (opts?.forceQueue === true || + (followUpBehavior === "queue" && phase === "running" && !opts?.forceImmediate)); + if (shouldQueue) { + const queueText = appendTerminalContextsToPrompt(trimmed, sendableComposerTerminalContexts); + setQueuedFollowUps((prev) => [ + ...prev, + { + id: randomUUID(), + text: queueText, + displayText: trimmed.slice(0, 120) || "[message]", + }, + ]); + promptRef.current = ""; + clearComposerDraftContent(threadIdForSend); + setComposerHighlightedItemId(null); + setComposerCursor(0); + setComposerTrigger(null); + return; + } + sendInFlightRef.current = true; beginLocalDispatch({ preparingWorktree: Boolean(baseBranchForWorktree) }); @@ -3872,6 +3969,17 @@ export default function ChatView({ threadId }: ChatViewProps) { } } + // Cmd/Ctrl+Shift+Enter: one-off behavior inversion. + // In queue mode → send immediately; in steer mode → add to queue. + if (key === "Enter" && event.shiftKey && (event.metaKey || event.ctrlKey)) { + if (followUpBehavior === "queue") { + void onSend(undefined, { forceImmediate: true }); + } else { + void onSend(undefined, { forceQueue: true }); + } + return true; + } + if (key === "Enter" && !event.shiftKey) { void onSend(); return true; @@ -4096,6 +4204,16 @@ export default function ChatView({ threadId }: ChatViewProps) { /> ) : null} + {showQueuedFollowUpsPanel && ( +
+ + setQueuedFollowUps((prev) => prev.filter((item) => item.id !== id)) + } + /> +
+ )}
; + onRemove: (id: string) => void; +}) { + if (items.length === 0) return null; + return ( +
+ + {items.length === 1 ? "1 follow-up queued" : `${items.length} follow-ups queued`} + + {items.map((item, index) => ( +
+ + {index + 1}. + {item.displayText} + + +
+ ))} +
+ ); +}); diff --git a/apps/web/src/components/settings/FeaturesPanels.tsx b/apps/web/src/components/settings/FeaturesPanels.tsx new file mode 100644 index 0000000000..53eb2fccec --- /dev/null +++ b/apps/web/src/components/settings/FeaturesPanels.tsx @@ -0,0 +1,729 @@ +import { useEffect, useState } from "react"; +import { + ActivityIcon, + BrainIcon, + GitForkIcon, + LayoutGridIcon, + NetworkIcon, + ShieldCheckIcon, + UsersIcon, + WalletIcon, + WorkflowIcon, +} from "lucide-react"; +import { Button } from "../ui/button"; +import { cn } from "../../lib/utils"; +import { useCostStore, subscribeCostEvents } from "../../costStore"; +import { useAuditStore, subscribeAuditEvents } from "../../auditStore"; +import { useCIStore, subscribeCIEvents } from "../../ciStore"; +import { useRoutingStore, subscribeRoutingEvents } from "../../routingStore"; +import { usePipelineStore, subscribePipelineEvents } from "../../pipelineStore"; +import { useWorkflowStore } from "../../workflowStore"; +import { useTaskStore, subscribeTaskEvents } from "../../taskStore"; +import { useMemoryStore } from "../../memoryStore"; +import { usePresenceStore, subscribePresenceEvents } from "../../presenceStore"; + +// ── Types ────────────────────────────────────────────────────────────── + +type FeatureTab = + | "cost" + | "audit" + | "ci" + | "routing" + | "pipelines" + | "workflows" + | "tasks" + | "memory" + | "presence"; + +interface TabConfig { + id: FeatureTab; + label: string; + icon: typeof WalletIcon; + description: string; +} + +const TABS: readonly TabConfig[] = [ + { + id: "cost", + label: "Cost & Tokens", + icon: WalletIcon, + description: "Track token usage and budget limits", + }, + { + id: "audit", + label: "Audit Log", + icon: ShieldCheckIcon, + description: "Structured activity log", + }, + { id: "ci", label: "CI / CD", icon: GitForkIcon, description: "CI pipeline status and feedback" }, + { + id: "routing", + label: "Routing", + icon: NetworkIcon, + description: "Provider health 
and routing rules", + }, + { + id: "pipelines", + label: "Pipelines", + icon: WorkflowIcon, + description: "Multi-agent pipeline execution", + }, + { + id: "workflows", + label: "Workflows", + icon: LayoutGridIcon, + description: "Reusable workflow templates", + }, + { id: "tasks", label: "Tasks", icon: ActivityIcon, description: "Task decomposition trees" }, + { id: "memory", label: "Memory", icon: BrainIcon, description: "Project knowledge base" }, + { + id: "presence", + label: "Presence", + icon: UsersIcon, + description: "Shared sessions and presence", + }, +] as const; + +// ── Cost Panel ───────────────────────────────────────────────────────── + +function CostPanel() { + const { summary, budgets, recentAlerts, isLoading, error, fetchSummary, fetchBudgets } = + useCostStore(); + + useEffect(() => { + const unsub = subscribeCostEvents(); + void fetchSummary(); + void fetchBudgets(); + return unsub; + }, [fetchSummary, fetchBudgets]); + + return ( +
+
+

Usage Summary

+ {isLoading &&

Loading…

} + {error &&

{error}

} + {summary && ( +
+
+

Total Cost

+

+ ${(summary.totalCostCents / 100).toFixed(4)} +

+
+
+

Input Tokens

+

+ {summary.totalInputTokens.toLocaleString()} +

+
+
+

Output Tokens

+

+ {summary.totalOutputTokens.toLocaleString()} +

+
+
+

Thinking Tokens

+

+ {summary.totalThinkingTokens.toLocaleString()} +

+
+
+ )} +
+ + {budgets.length > 0 && ( +
+

Budgets

+
+ {budgets.map((b) => { + const pct = + b.limitCents > 0 ? Math.round((b.currentSpendCents / b.limitCents) * 100) : 0; + return ( +
+
+ {b.projectId ?? "Global"} + = 90 ? "text-red-500" : "text-muted-foreground", + )} + > + {pct}% of ${(b.limitCents / 100).toFixed(2)} + +
+
+
= 90 ? "bg-red-500" : "bg-primary")} + style={{ width: `${Math.min(100, pct)}%` }} + /> +
+
+ ); + })} +
+
+ )} + + {recentAlerts.length > 0 && ( +
+

Recent Alerts

+
+ {recentAlerts.slice(0, 5).map((a, i) => ( +
+ {a.percentUsed}% budget used + {a.alertedAt} +
+ ))} +
+
+ )} + + {summary?.byProvider && summary.byProvider.length > 0 && ( +
+

By Provider

+
+ {summary.byProvider.map((p) => ( +
+ {p.provider} + ${(p.costCents / 100).toFixed(4)} +
+ ))} +
+
+ )} +
+ ); +} + +// ── Audit Panel ──────────────────────────────────────────────────────── + +function AuditPanel() { + const { entries, total, isLoading, error, query } = useAuditStore(); + + useEffect(() => { + const unsub = subscribeAuditEvents(); + void query({ limit: 50, offset: 0 }); + return unsub; + }, [query]); + + const SEVERITY_COLORS = { + info: "text-blue-500", + warning: "text-amber-500", + critical: "text-red-500", + } as const; + + return ( +
+
+

Activity Log ({total} total)

+ +
+ {isLoading &&

Loading…

} + {error &&

{error}

} +
+ {entries.map((e) => ( +
+ + {e.severity} + +
+ {e.action} + {e.summary} +
+ {e.timestamp.slice(11, 19)} +
+ ))} + {entries.length === 0 && !isLoading && ( +

No audit entries yet.

+ )} +
+
+ ); +} + +// ── CI Panel ─────────────────────────────────────────────────────────── + +function CIPanel() { + const { runsByProject, isLoading, error } = useCIStore(); + + useEffect(() => { + return subscribeCIEvents(); + }, []); + + const STATUS_COLORS = { + queued: "text-muted-foreground", + in_progress: "text-blue-500", + completed: "text-green-500", + failed: "text-red-500", + cancelled: "text-muted-foreground", + timed_out: "text-amber-500", + } as const; + + const allRuns = Object.values(runsByProject).flat(); + + return ( +
+

CI Runs

+ {isLoading &&

Loading…

} + {error &&

{error}

} + {allRuns.length === 0 && !isLoading && ( +

+ No CI runs tracked yet. CI runs will appear here when recorded. +

+ )} +
+ {allRuns.slice(0, 20).map((run) => ( +
+
+ {run.workflowName} + {run.status} +
+
+ {run.branch} + · + {run.commitSha.slice(0, 7)} + {run.conclusion && ( + <> + · + {run.conclusion} + + )} +
+
+ ))} +
+
+ ); +} + +// ── Routing Panel ────────────────────────────────────────────────────── + +function RoutingPanel() { + const { providerHealth, rules, isLoading, error, fetchHealth, fetchRules } = useRoutingStore(); + + useEffect(() => { + const unsub = subscribeRoutingEvents(); + void fetchHealth(); + void fetchRules(); + return unsub; + }, [fetchHealth, fetchRules]); + + const STATUS_COLORS = { + healthy: "text-green-500", + degraded: "text-amber-500", + down: "text-red-500", + unknown: "text-muted-foreground", + } as const; + + return ( +
+
+

Provider Health

+ {isLoading &&

Loading…

} + {error &&

{error}

} +
+ {providerHealth.map((h) => ( +
+ {h.provider} + {h.status} +
+ ))} +
+
+ +
+

Routing Rules ({rules.length})

+ {rules.length === 0 && ( +

+ No routing rules configured. Default routing uses claudeAgent. +

+ )} +
+ {rules.map((r, i) => ( +
+
+ {r.name} + + {r.strategy} · priority {r.priority} + +
+ {r.preferredProviders.length > 0 && ( +

+ Preferred: {r.preferredProviders.join(", ")} +

+ )} +
+ ))} +
+
+
+ ); +} + +// ── Pipelines Panel ──────────────────────────────────────────────────── + +function PipelinesPanel() { + const { pipelinesByProject, executions, isLoading, error } = usePipelineStore(); + + useEffect(() => { + return subscribePipelineEvents(); + }, []); + + const allPipelines = Object.values(pipelinesByProject).flat(); + const allExecutions = Object.values(executions); + + return ( +
+
+

Pipelines ({allPipelines.length})

+ {isLoading &&

Loading…

} + {error &&

{error}

} + {allPipelines.length === 0 && !isLoading && ( +

+ No pipelines defined yet. Create one via the API. +

+ )} + {allPipelines.slice(0, 10).map((p) => ( +
+

{p.name}

+ {p.description &&

{p.description}

} +

{p.stages.length} stages

+
+ ))} +
+ + {allExecutions.length > 0 && ( +
+

Executions

+
+ {allExecutions.slice(0, 10).map((e) => ( +
+
+ {e.pipelineId} + + {e.status} + +
+

+ {e.stages.filter((s) => s.status === "completed").length}/{e.stages.length} stages + complete +

+
+ ))} +
+
+ )} +
+ ); +} + +// ── Workflows Panel ──────────────────────────────────────────────────── + +function WorkflowsPanel() { + const { templates, isLoading, error, fetchTemplates } = useWorkflowStore(); + + useEffect(() => { + void fetchTemplates(); + }, [fetchTemplates]); + + const builtIn = templates.filter((t) => t.isBuiltIn); + const custom = templates.filter((t) => !t.isBuiltIn); + + return ( +
+ {isLoading &&

Loading…

} + {error &&

{error}

} + +
+

Built-in Templates ({builtIn.length})

+
+ {builtIn.map((t) => ( +
+

{t.name}

+ {t.description &&

{t.description}

} +

+ {t.steps.length} steps · {t.category} +

+ {t.variables.length > 0 && ( +

+ Variables: {t.variables.map((v) => v.name).join(", ")} +

+ )} +
+ ))} +
+
+ + {custom.length > 0 && ( +
+

Custom Templates ({custom.length})

+
+ {custom.map((t) => ( +
+

{t.name}

+ {t.description &&

{t.description}

} +

+ {t.steps.length} steps · {t.category} +

+
+ ))} +
+
+ )} +
+ ); +} + +// ── Tasks Panel ──────────────────────────────────────────────────────── + +function TasksPanel() { + const { trees, isLoading, error } = useTaskStore(); + + useEffect(() => { + return subscribeTaskEvents(); + }, []); + + const allTrees = Object.values(trees); + + const STATUS_COLORS = { + pending: "text-muted-foreground", + "in-progress": "text-blue-500", + completed: "text-green-500", + failed: "text-red-500", + blocked: "text-amber-500", + skipped: "text-muted-foreground", + } as const; + + return ( +
+

Task Trees ({allTrees.length})

+ {isLoading &&

Loading…

} + {error &&

{error}

} + {allTrees.length === 0 && !isLoading && ( +

+ No task trees yet. Decompose a prompt to create one. +

+ )} +
+ {allTrees.slice(0, 10).map((tree) => ( +
+
+

{tree.rootPrompt}

+ + {tree.status} + +
+

{tree.tasks.length} tasks

+
+ {tree.tasks.slice(0, 5).map((task) => ( +
+ · + {task.title} +
+ ))} + {tree.tasks.length > 5 && ( +

+{tree.tasks.length - 5} more

+ )} +
+
+ ))} +
+
+ ); +} + +// ── Memory Panel ─────────────────────────────────────────────────────── + +function MemoryPanel() { + const { entriesByProject, searchResults, isLoading, error } = useMemoryStore(); + + useEffect(() => { + // No events to subscribe — memory is CRUD-only + }, []); + + const allEntries = Object.values(entriesByProject).flat(); + + return ( +
+
+

Memory Entries ({allEntries.length})

+ {isLoading &&

Loading…

} + {error &&

{error}

} + {allEntries.length === 0 && !isLoading && ( +

+ No memory entries yet. Add entries to give agents persistent context about your project. +

+ )} +
+ {allEntries.slice(0, 10).map((e) => ( +
+
+ {e.title} + {e.kind} +
+

{e.content}

+ {e.tags.length > 0 && ( +
+ {e.tags.map((tag) => ( + + {tag} + + ))} +
+ )} +
+ ))} +
+
+ + {searchResults.length > 0 && ( +
+

Search Results

+
+ {searchResults.map((r) => ( +
+
+ {r.entry.title} + score {r.matchScore.toFixed(2)} +
+

{r.entry.content.slice(0, 100)}

+
+ ))} +
+
+ )} +
+ ); +} + +// ── Presence Panel ───────────────────────────────────────────────────── + +function PresencePanel() { + const { participantsByThread, sharesByThread, isLoading, error } = usePresenceStore(); + + useEffect(() => { + return subscribePresenceEvents(); + }, []); + + const threadIds = Object.keys(participantsByThread); + + return ( +
+

Active Sessions

+ {isLoading &&

Loading…

} + {error &&

{error}

} + {threadIds.length === 0 && !isLoading && ( +

+ No active shared sessions. Share a thread to enable multi-user collaboration. +

+ )} +
+ {threadIds.map((threadId) => { + const participants = participantsByThread[threadId] ?? []; + const share = sharesByThread[threadId]; + return ( +
+
+

{threadId}

+ {share && ( + + {share.isPublic ? "public" : "private"} · {share.maxParticipants} max + + )} +
+
+ {participants.map((p) => ( +
+ + {p.displayName} + ({p.cursor}) +
+ ))} +
+
+ ); + })} +
+
+ ); +} + +// ── Main Features Panel ──────────────────────────────────────────────── + +const TAB_PANELS: Record React.JSX.Element> = { + cost: CostPanel, + audit: AuditPanel, + ci: CIPanel, + routing: RoutingPanel, + pipelines: PipelinesPanel, + workflows: WorkflowsPanel, + tasks: TasksPanel, + memory: MemoryPanel, + presence: PresencePanel, +}; + +export function FeaturesSettingsPanel() { + const [activeTab, setActiveTab] = useState("cost"); + const ActivePanel = TAB_PANELS[activeTab]; + + return ( +
+ {/* Tab sidebar */} +
+

+ Features +

+ +
+ + {/* Panel content */} +
+ +
+
+ ); +} diff --git a/apps/web/src/components/settings/SettingsSidebarNav.tsx b/apps/web/src/components/settings/SettingsSidebarNav.tsx index 6ba698c91f..842c9b93ff 100644 --- a/apps/web/src/components/settings/SettingsSidebarNav.tsx +++ b/apps/web/src/components/settings/SettingsSidebarNav.tsx @@ -1,5 +1,5 @@ import type { ComponentType } from "react"; -import { ArchiveIcon, ArrowLeftIcon, Settings2Icon } from "lucide-react"; +import { ArchiveIcon, ArrowLeftIcon, LayoutGridIcon, Settings2Icon } from "lucide-react"; import { useNavigate } from "@tanstack/react-router"; import { @@ -12,7 +12,7 @@ import { SidebarSeparator, } from "../ui/sidebar"; -export type SettingsSectionPath = "/settings/general" | "/settings/archived"; +export type SettingsSectionPath = "/settings/general" | "/settings/archived" | "/settings/features"; export const SETTINGS_NAV_ITEMS: ReadonlyArray<{ label: string; @@ -21,6 +21,7 @@ export const SETTINGS_NAV_ITEMS: ReadonlyArray<{ }> = [ { label: "General", to: "/settings/general", icon: Settings2Icon }, { label: "Archive", to: "/settings/archived", icon: ArchiveIcon }, + { label: "Features", to: "/settings/features", icon: LayoutGridIcon }, ]; export function SettingsSidebarNav({ pathname }: { pathname: string }) { diff --git a/apps/web/src/costStore.ts b/apps/web/src/costStore.ts new file mode 100644 index 0000000000..bb182e2da9 --- /dev/null +++ b/apps/web/src/costStore.ts @@ -0,0 +1,86 @@ +import type { + CostAlert, + CostBudget, + CostEntry, + CostGetSummaryInput, + CostSummary, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface CostState { + summary: CostSummary | null; + budgets: CostBudget[]; + recentEntries: CostEntry[]; + recentAlerts: CostAlert[]; + isLoading: boolean; + error: string | null; +} + +export interface CostStore extends CostState { + fetchSummary: (input?: CostGetSummaryInput) => Promise; + fetchBudgets: (projectId?: string) => Promise; + 
clearError: () => void; +} + +const initialState: CostState = { + summary: null, + budgets: [], + recentEntries: [], + recentAlerts: [], + isLoading: false, + error: null, +}; + +export const useCostStore = create((set) => ({ + ...initialState, + + fetchSummary: async (input = {}) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const summary = await client.cost.getSummary( + input as Parameters[0], + ); + set({ summary, isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + fetchBudgets: async (projectId) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.cost.getBudgets(projectId ? { projectId: projectId as any } : {}); + set({ budgets: (result as { budgets: CostBudget[] }).budgets, isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live cost events; returns an unsubscribe function. */ +export function subscribeCostEvents(): () => void { + const client = getWsRpcClient(); + return client.cost.onEvent((event) => { + if (event.type === "cost.entry") { + useCostStore.setState((s) => ({ + recentEntries: [event.entry, ...s.recentEntries].slice(0, 200), + })); + } else if (event.type === "cost.alert") { + useCostStore.setState((s) => ({ + recentAlerts: [event.alert, ...s.recentAlerts].slice(0, 50), + })); + } else if (event.type === "cost.budget.updated") { + useCostStore.setState((s) => ({ + budgets: s.budgets.some((b) => b.id === event.budget.id) + ? s.budgets.map((b) => (b.id === event.budget.id ? 
event.budget : b)) + : [...s.budgets, event.budget], + })); + } + }); +} diff --git a/apps/web/src/memoryStore.ts b/apps/web/src/memoryStore.ts new file mode 100644 index 0000000000..3df24dcfac --- /dev/null +++ b/apps/web/src/memoryStore.ts @@ -0,0 +1,123 @@ +import type { + MemoryAddInput, + MemoryEntry, + MemoryForgetInput, + MemoryListInput, + MemorySearchInput, + MemorySearchResult, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface MemoryState { + entriesByProject: Record; + searchResults: MemorySearchResult[]; + searchQueryTime: number; + isLoading: boolean; + error: string | null; +} + +export interface MemoryStore extends MemoryState { + search: (input: MemorySearchInput) => Promise; + add: (input: MemoryAddInput) => Promise; + forget: (input: MemoryForgetInput) => Promise; + list: (input: MemoryListInput) => Promise; + index: (projectId: string, forceReindex?: boolean) => Promise; + clearSearchResults: () => void; + clearError: () => void; +} + +const initialState: MemoryState = { + entriesByProject: {}, + searchResults: [], + searchQueryTime: 0, + isLoading: false, + error: null, +}; + +export const useMemoryStore = create((set) => ({ + ...initialState, + + search: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.memory.search(input); + set({ + searchResults: result.results as MemorySearchResult[], + searchQueryTime: result.queryTime, + isLoading: false, + }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + add: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const entry = (await client.memory.add(input)) as MemoryEntry; + set((s) => ({ + entriesByProject: { + ...s.entriesByProject, + [input.projectId]: [entry, ...(s.entriesByProject[input.projectId] ?? 
[])], + }, + isLoading: false, + })); + return entry; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + forget: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.memory.forget(input); + set((s) => { + const updated: Record = {}; + for (const [projectId, entries] of Object.entries(s.entriesByProject)) { + updated[projectId] = entries.filter((e) => e.id !== input.entryId); + } + return { entriesByProject: updated, isLoading: false }; + }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + list: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.memory.list(input); + set((s) => ({ + entriesByProject: { + ...s.entriesByProject, + [input.projectId]: result.entries as MemoryEntry[], + }, + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + index: async (projectId, forceReindex = false) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.memory.index({ projectId: projectId as any, forceReindex }); + set({ isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearSearchResults: () => set({ searchResults: [] }), + clearError: () => set({ error: null }), +})); diff --git a/apps/web/src/pipelineStore.ts b/apps/web/src/pipelineStore.ts new file mode 100644 index 0000000000..5308ab0cc4 --- /dev/null +++ b/apps/web/src/pipelineStore.ts @@ -0,0 +1,160 @@ +import type { + PipelineCreateInput, + PipelineDefinition, + PipelineExecution, + PipelineExecuteInput, + PipelineListInput, + PipelineStageExecution, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface PipelineState { + pipelinesByProject: Record; + executions: Record; + isLoading: 
boolean; + error: string | null; +} + +export interface PipelineStore extends PipelineState { + fetchPipelines: (input: PipelineListInput) => Promise; + createPipeline: (input: PipelineCreateInput) => Promise; + executePipeline: (input: PipelineExecuteInput) => Promise; + getExecution: (executionId: string) => Promise; + cancelExecution: (executionId: string) => Promise; + clearError: () => void; +} + +const initialState: PipelineState = { + pipelinesByProject: {}, + executions: {}, + isLoading: false, + error: null, +}; + +export const usePipelineStore = create((set, get) => ({ + ...initialState, + + fetchPipelines: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.pipeline.list(input); + set((s) => ({ + pipelinesByProject: { + ...s.pipelinesByProject, + [input.projectId]: result.pipelines as PipelineDefinition[], + }, + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + createPipeline: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const pipeline = (await client.pipeline.create(input)) as PipelineDefinition; + set((s) => ({ + pipelinesByProject: { + ...s.pipelinesByProject, + [input.projectId]: [...(s.pipelinesByProject[input.projectId] ?? 
[]), pipeline], + }, + isLoading: false, + })); + return pipeline; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + executePipeline: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const execution = (await client.pipeline.execute(input)) as PipelineExecution; + set((s) => ({ + executions: { ...s.executions, [execution.id]: execution }, + isLoading: false, + })); + return execution; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + getExecution: async (executionId) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const execution = (await client.pipeline.getExecution({ + executionId: executionId as any, + })) as PipelineExecution; + set((s) => ({ + executions: { ...s.executions, [execution.id]: execution }, + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + cancelExecution: async (executionId) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.pipeline.cancel({ executionId: executionId as any }); + set((s) => { + const existing = s.executions[executionId]; + if (!existing) return { isLoading: false }; + return { + executions: { + ...s.executions, + [executionId]: { ...existing, status: "cancelled" as const }, + }, + isLoading: false, + }; + }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live pipeline events; returns an unsubscribe function. 
*/ +export function subscribePipelineEvents(): () => void { + const client = getWsRpcClient(); + return client.pipeline.onEvent((event) => { + if (event.type === "pipeline.execution.updated") { + const execution = event.execution as PipelineExecution; + usePipelineStore.setState((s) => ({ + executions: { ...s.executions, [execution.id]: execution }, + })); + } else if (event.type === "pipeline.stage.updated") { + const { executionId, stage } = event as { + executionId: string; + stage: PipelineStageExecution; + }; + usePipelineStore.setState((s) => { + const existing = s.executions[executionId]; + if (!existing) return {}; + const updatedStages = existing.stages.some((st) => st.stageId === stage.stageId) + ? existing.stages.map((st) => (st.stageId === stage.stageId ? stage : st)) + : [...existing.stages, stage]; + return { + executions: { + ...s.executions, + [executionId]: { ...existing, stages: updatedStages }, + }, + }; + }); + } + }); +} diff --git a/apps/web/src/presenceStore.ts b/apps/web/src/presenceStore.ts new file mode 100644 index 0000000000..6cfc56e033 --- /dev/null +++ b/apps/web/src/presenceStore.ts @@ -0,0 +1,178 @@ +import type { + Participant, + ParticipantId, + PresenceCursorKind, + PresenceGetParticipantsInput, + PresenceJoinInput, + PresenceLeaveInput, + PresenceShareInput, + PresenceUpdateCursorInput, + SessionShare, + ThreadId, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface PresenceState { + participantsByThread: Record; + sharesByThread: Record; + isLoading: boolean; + error: string | null; +} + +export interface PresenceStore extends PresenceState { + join: (input: PresenceJoinInput) => Promise; + leave: (input: PresenceLeaveInput) => Promise; + updateCursor: (input: PresenceUpdateCursorInput) => Promise; + share: (input: PresenceShareInput) => Promise; + getParticipants: (input: PresenceGetParticipantsInput) => Promise; + clearError: () => void; +} + 
+const initialState: PresenceState = { + participantsByThread: {}, + sharesByThread: {}, + isLoading: false, + error: null, +}; + +export const usePresenceStore = create((set) => ({ + ...initialState, + + join: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const participant = (await client.presence.join(input)) as Participant; + set((s) => ({ + participantsByThread: { + ...s.participantsByThread, + [input.threadId]: [...(s.participantsByThread[input.threadId] ?? []), participant], + }, + isLoading: false, + })); + return participant; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + leave: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.presence.leave(input); + set((s) => { + const { [input.threadId]: _removed, ...rest } = s.participantsByThread; + return { participantsByThread: rest, isLoading: false }; + }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + updateCursor: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.presence.updateCursor(input); + set({ isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + share: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const sessionShare = (await client.presence.share(input)) as SessionShare; + set((s) => ({ + sharesByThread: { ...s.sharesByThread, [input.threadId]: sessionShare }, + isLoading: false, + })); + return sessionShare; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + getParticipants: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.presence.getParticipants(input); + set((s) => ({ + participantsByThread: { + 
...s.participantsByThread, + [input.threadId]: result.participants as Participant[], + }, + sharesByThread: { + ...s.sharesByThread, + [input.threadId]: (result.share as SessionShare | null) ?? null, + }, + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live presence events; returns an unsubscribe function. */ +export function subscribePresenceEvents(): () => void { + const client = getWsRpcClient(); + return client.presence.onEvent((event) => { + if (event.type === "presence.joined") { + const participant = event.participant as Participant; + const threadId = participant.activeThreadId; + if (!threadId) return; + usePresenceStore.setState((s) => ({ + participantsByThread: { + ...s.participantsByThread, + [threadId]: [ + ...(s.participantsByThread[threadId] ?? []).filter((p) => p.id !== participant.id), + participant, + ], + }, + })); + } else if (event.type === "presence.left") { + const { participantId, threadId } = event as { + participantId: ParticipantId; + threadId: ThreadId; + }; + usePresenceStore.setState((s) => ({ + participantsByThread: { + ...s.participantsByThread, + [threadId]: (s.participantsByThread[threadId] ?? []).filter( + (p) => p.id !== participantId, + ), + }, + })); + } else if (event.type === "presence.cursor.updated") { + const { participantId, cursor, threadId } = event as { + participantId: ParticipantId; + cursor: PresenceCursorKind; + threadId: ThreadId; + }; + usePresenceStore.setState((s) => ({ + participantsByThread: { + ...s.participantsByThread, + [threadId]: (s.participantsByThread[threadId] ?? []).map((p) => + p.id === participantId ? 
{ ...p, cursor } : p, + ), + }, + })); + } else if (event.type === "presence.share.created") { + const share = event.share as SessionShare; + usePresenceStore.setState((s) => ({ + sharesByThread: { ...s.sharesByThread, [share.threadId]: share }, + })); + } + }); +} diff --git a/apps/web/src/routeTree.gen.ts b/apps/web/src/routeTree.gen.ts index 77b1b15842..cf02c3e493 100644 --- a/apps/web/src/routeTree.gen.ts +++ b/apps/web/src/routeTree.gen.ts @@ -14,6 +14,7 @@ import { Route as ChatRouteImport } from './routes/_chat' import { Route as ChatIndexRouteImport } from './routes/_chat.index' import { Route as SettingsGeneralRouteImport } from './routes/settings.general' import { Route as SettingsArchivedRouteImport } from './routes/settings.archived' +import { Route as SettingsFeaturesRouteImport } from './routes/settings.features' import { Route as ChatThreadIdRouteImport } from './routes/_chat.$threadId' const SettingsRoute = SettingsRouteImport.update({ @@ -40,6 +41,11 @@ const SettingsArchivedRoute = SettingsArchivedRouteImport.update({ path: '/archived', getParentRoute: () => SettingsRoute, } as any) +const SettingsFeaturesRoute = SettingsFeaturesRouteImport.update({ + id: '/features', + path: '/features', + getParentRoute: () => SettingsRoute, +} as any) const ChatThreadIdRoute = ChatThreadIdRouteImport.update({ id: '/$threadId', path: '/$threadId', @@ -51,12 +57,14 @@ export interface FileRoutesByFullPath { '/settings': typeof SettingsRouteWithChildren '/$threadId': typeof ChatThreadIdRoute '/settings/archived': typeof SettingsArchivedRoute + '/settings/features': typeof SettingsFeaturesRoute '/settings/general': typeof SettingsGeneralRoute } export interface FileRoutesByTo { '/settings': typeof SettingsRouteWithChildren '/$threadId': typeof ChatThreadIdRoute '/settings/archived': typeof SettingsArchivedRoute + '/settings/features': typeof SettingsFeaturesRoute '/settings/general': typeof SettingsGeneralRoute '/': typeof ChatIndexRoute } @@ -66,6 +74,7 @@ export 
interface FileRoutesById { '/settings': typeof SettingsRouteWithChildren '/_chat/$threadId': typeof ChatThreadIdRoute '/settings/archived': typeof SettingsArchivedRoute + '/settings/features': typeof SettingsFeaturesRoute '/settings/general': typeof SettingsGeneralRoute '/_chat/': typeof ChatIndexRoute } @@ -76,12 +85,14 @@ export interface FileRouteTypes { | '/settings' | '/$threadId' | '/settings/archived' + | '/settings/features' | '/settings/general' fileRoutesByTo: FileRoutesByTo to: | '/settings' | '/$threadId' | '/settings/archived' + | '/settings/features' | '/settings/general' | '/' id: @@ -90,6 +101,7 @@ export interface FileRouteTypes { | '/settings' | '/_chat/$threadId' | '/settings/archived' + | '/settings/features' | '/settings/general' | '/_chat/' fileRoutesById: FileRoutesById @@ -136,6 +148,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof SettingsArchivedRouteImport parentRoute: typeof SettingsRoute } + '/settings/features': { + id: '/settings/features' + path: '/features' + fullPath: '/settings/features' + preLoaderRoute: typeof SettingsFeaturesRouteImport + parentRoute: typeof SettingsRoute + } '/_chat/$threadId': { id: '/_chat/$threadId' path: '/$threadId' @@ -160,11 +179,13 @@ const ChatRouteWithChildren = ChatRoute._addFileChildren(ChatRouteChildren) interface SettingsRouteChildren { SettingsArchivedRoute: typeof SettingsArchivedRoute + SettingsFeaturesRoute: typeof SettingsFeaturesRoute SettingsGeneralRoute: typeof SettingsGeneralRoute } const SettingsRouteChildren: SettingsRouteChildren = { SettingsArchivedRoute: SettingsArchivedRoute, + SettingsFeaturesRoute: SettingsFeaturesRoute, SettingsGeneralRoute: SettingsGeneralRoute, } diff --git a/apps/web/src/routes/settings.features.tsx b/apps/web/src/routes/settings.features.tsx new file mode 100644 index 0000000000..f92f285bba --- /dev/null +++ b/apps/web/src/routes/settings.features.tsx @@ -0,0 +1,7 @@ +import { createFileRoute } from "@tanstack/react-router"; + +import { 
FeaturesSettingsPanel } from "../components/settings/FeaturesPanels"; + +export const Route = createFileRoute("/settings/features")({ + component: FeaturesSettingsPanel, +}); diff --git a/apps/web/src/routingStore.ts b/apps/web/src/routingStore.ts new file mode 100644 index 0000000000..d682fad5b1 --- /dev/null +++ b/apps/web/src/routingStore.ts @@ -0,0 +1,92 @@ +import type { + ProviderHealth, + RoutingDecision, + RoutingGetRulesResult, + RoutingRule, + RoutingSetRulesInput, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface RoutingState { + providerHealth: ProviderHealth[]; + rules: RoutingRule[]; + recentDecisions: RoutingDecision[]; + isLoading: boolean; + error: string | null; +} + +export interface RoutingStore extends RoutingState { + fetchHealth: () => Promise; + fetchRules: () => Promise; + setRules: (input: RoutingSetRulesInput) => Promise; + clearError: () => void; +} + +const initialState: RoutingState = { + providerHealth: [], + rules: [], + recentDecisions: [], + isLoading: false, + error: null, +}; + +export const useRoutingStore = create((set) => ({ + ...initialState, + + fetchHealth: async () => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.routing.getHealth(); + set({ providerHealth: result.providers as ProviderHealth[], isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + fetchRules: async () => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result: RoutingGetRulesResult = await client.routing.getRules(); + set({ rules: result.rules as RoutingRule[], isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + setRules: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await 
client.routing.setRules(input); + set({ rules: result.rules as RoutingRule[], isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live routing events; returns an unsubscribe function. */ +export function subscribeRoutingEvents(): () => void { + const client = getWsRpcClient(); + return client.routing.onEvent((event) => { + if (event.type === "routing.health.updated") { + const health = event.health as ProviderHealth; + useRoutingStore.setState((s) => ({ + providerHealth: s.providerHealth.some((h) => h.provider === health.provider) + ? s.providerHealth.map((h) => (h.provider === health.provider ? health : h)) + : [...s.providerHealth, health], + })); + } else if (event.type === "routing.decision") { + const decision = event.decision as RoutingDecision; + useRoutingStore.setState((s) => ({ + recentDecisions: [decision, ...s.recentDecisions].slice(0, 100), + })); + } + }); +} diff --git a/apps/web/src/taskStore.ts b/apps/web/src/taskStore.ts new file mode 100644 index 0000000000..cc90be5a20 --- /dev/null +++ b/apps/web/src/taskStore.ts @@ -0,0 +1,118 @@ +import type { + TaskDecomposeInput, + TaskExecuteInput, + TaskListTreesInput, + TaskNode, + TaskTree, + TaskTreeId, + TaskUpdateStatusInput, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface TaskState { + trees: Record; + isLoading: boolean; + error: string | null; +} + +export interface TaskStore extends TaskState { + decompose: (input: TaskDecomposeInput) => Promise; + listTrees: (input: TaskListTreesInput) => Promise; + getTree: (treeId: TaskTreeId) => Promise; + updateStatus: (input: TaskUpdateStatusInput) => Promise; + executeTree: (input: TaskExecuteInput) => Promise; + clearError: () => void; +} + +const initialState: TaskState = { + trees: {}, + isLoading: false, + error: null, +}; + +export const useTaskStore = 
create((set) => ({ + ...initialState, + + decompose: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const tree = (await client.task.decompose(input)) as TaskTree; + set((s) => ({ trees: { ...s.trees, [tree.id]: tree }, isLoading: false })); + return tree; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + listTrees: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.task.listTrees(input); + const treesById = Object.fromEntries((result.trees as TaskTree[]).map((t) => [t.id, t])); + set((s) => ({ trees: { ...s.trees, ...treesById }, isLoading: false })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + getTree: async (treeId) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const tree = (await client.task.getTree({ treeId })) as TaskTree; + set((s) => ({ trees: { ...s.trees, [tree.id]: tree }, isLoading: false })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + updateStatus: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.task.updateStatus(input); + set({ isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + executeTree: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.task.execute(input); + set({ isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); + +/** Subscribe to live task events; returns an unsubscribe function. 
*/ +export function subscribeTaskEvents(): () => void { + const client = getWsRpcClient(); + return client.task.onEvent((event) => { + if (event.type === "task.tree.updated") { + const tree = event.tree as TaskTree; + useTaskStore.setState((s) => ({ trees: { ...s.trees, [tree.id]: tree } })); + } else if (event.type === "task.node.updated") { + const { treeId, node } = event as { treeId: string; node: TaskNode }; + useTaskStore.setState((s) => { + const tree = s.trees[treeId]; + if (!tree) return {}; + const updatedTasks = tree.tasks.some((t) => t.id === node.id) + ? tree.tasks.map((t) => (t.id === node.id ? node : t)) + : [...tree.tasks, node]; + return { trees: { ...s.trees, [treeId]: { ...tree, tasks: updatedTasks } } }; + }); + } + }); +} diff --git a/apps/web/src/workflowStore.ts b/apps/web/src/workflowStore.ts new file mode 100644 index 0000000000..75ad036cab --- /dev/null +++ b/apps/web/src/workflowStore.ts @@ -0,0 +1,85 @@ +import type { + WorkflowCreateInput, + WorkflowDeleteInput, + WorkflowExecuteInput, + WorkflowListInput, + WorkflowTemplate, +} from "@t3tools/contracts"; +import { create } from "zustand"; + +import { getWsRpcClient } from "./wsRpcClient"; + +export interface WorkflowState { + templates: WorkflowTemplate[]; + isLoading: boolean; + error: string | null; +} + +export interface WorkflowStore extends WorkflowState { + fetchTemplates: (input?: WorkflowListInput) => Promise; + createTemplate: (input: WorkflowCreateInput) => Promise; + deleteTemplate: (input: WorkflowDeleteInput) => Promise; + executeWorkflow: (input: WorkflowExecuteInput) => Promise; + clearError: () => void; +} + +const initialState: WorkflowState = { + templates: [], + isLoading: false, + error: null, +}; + +export const useWorkflowStore = create((set) => ({ + ...initialState, + + fetchTemplates: async (input = {}) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const result = await client.workflow.list(input as WorkflowListInput); 
+ set({ templates: result.templates as WorkflowTemplate[], isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + createTemplate: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + const template = (await client.workflow.create(input)) as WorkflowTemplate; + set((s) => ({ templates: [...s.templates, template], isLoading: false })); + return template; + } catch (e) { + set({ error: String(e), isLoading: false }); + return null; + } + }, + + deleteTemplate: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.workflow.delete(input); + set((s) => ({ + templates: s.templates.filter((t) => t.id !== input.templateId), + isLoading: false, + })); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + executeWorkflow: async (input) => { + set({ isLoading: true, error: null }); + try { + const client = getWsRpcClient(); + await client.workflow.execute(input); + set({ isLoading: false }); + } catch (e) { + set({ error: String(e), isLoading: false }); + } + }, + + clearError: () => set({ error: null }), +})); diff --git a/apps/web/src/wsRpcClient.ts b/apps/web/src/wsRpcClient.ts index 1d411aa1b9..eadd05f876 100644 --- a/apps/web/src/wsRpcClient.ts +++ b/apps/web/src/wsRpcClient.ts @@ -99,6 +99,65 @@ export interface WsRpcClient { readonly replayEvents: RpcUnaryMethod; readonly onDomainEvent: RpcStreamMethod; }; + readonly cost: { + readonly getSummary: RpcUnaryMethod; + readonly setBudget: RpcUnaryMethod; + readonly getBudgets: RpcUnaryMethod; + readonly onEvent: RpcStreamMethod; + }; + readonly audit: { + readonly query: RpcUnaryMethod; + readonly onEvent: RpcStreamMethod; + }; + readonly ci: { + readonly getStatus: RpcUnaryMethod; + readonly triggerRerun: RpcUnaryMethod; + readonly setFeedbackPolicy: RpcUnaryMethod; + readonly onEvent: RpcStreamMethod; + }; + readonly routing: { + readonly 
getHealth: RpcUnaryNoArgMethod; + readonly setRules: RpcUnaryMethod; + readonly getRules: RpcUnaryNoArgMethod; + readonly onEvent: RpcStreamMethod; + }; + readonly pipeline: { + readonly create: RpcUnaryMethod; + readonly list: RpcUnaryMethod; + readonly execute: RpcUnaryMethod; + readonly getExecution: RpcUnaryMethod; + readonly cancel: RpcUnaryMethod; + readonly onEvent: RpcStreamMethod; + }; + readonly workflow: { + readonly list: RpcUnaryMethod; + readonly create: RpcUnaryMethod; + readonly delete: RpcUnaryMethod; + readonly execute: RpcUnaryMethod; + }; + readonly task: { + readonly decompose: RpcUnaryMethod; + readonly updateStatus: RpcUnaryMethod; + readonly getTree: RpcUnaryMethod; + readonly listTrees: RpcUnaryMethod; + readonly execute: RpcUnaryMethod; + readonly onEvent: RpcStreamMethod; + }; + readonly memory: { + readonly index: RpcUnaryMethod; + readonly search: RpcUnaryMethod; + readonly add: RpcUnaryMethod; + readonly forget: RpcUnaryMethod; + readonly list: RpcUnaryMethod; + }; + readonly presence: { + readonly join: RpcUnaryMethod; + readonly leave: RpcUnaryMethod; + readonly updateCursor: RpcUnaryMethod; + readonly share: RpcUnaryMethod; + readonly getParticipants: RpcUnaryMethod; + readonly onEvent: RpcStreamMethod; + }; } let sharedWsRpcClient: WsRpcClient | null = null; @@ -226,5 +285,107 @@ export function createWsRpcClient(transport = new WsTransport()): WsRpcClient { options, ), }, + cost: { + getSummary: (input) => + transport.request((client) => client[WS_METHODS.costGetSummary](input)), + setBudget: (input) => transport.request((client) => client[WS_METHODS.costSetBudget](input)), + getBudgets: (input) => + transport.request((client) => client[WS_METHODS.costGetBudgets](input)), + onEvent: (listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribeCostEvents]({}), + listener, + options, + ), + }, + audit: { + query: (input) => transport.request((client) => client[WS_METHODS.auditQuery](input)), + onEvent: 
(listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribeAuditEvents]({}), + listener, + options, + ), + }, + ci: { + getStatus: (input) => transport.request((client) => client[WS_METHODS.ciGetStatus](input)), + triggerRerun: (input) => + transport.request((client) => client[WS_METHODS.ciTriggerRerun](input)), + setFeedbackPolicy: (input) => + transport.request((client) => client[WS_METHODS.ciSetFeedbackPolicy](input)), + onEvent: (listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribeCIEvents]({}), + listener, + options, + ), + }, + routing: { + getHealth: () => transport.request((client) => client[WS_METHODS.routingGetHealth]({})), + setRules: (input) => transport.request((client) => client[WS_METHODS.routingSetRules](input)), + getRules: () => transport.request((client) => client[WS_METHODS.routingGetRules]({})), + onEvent: (listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribeRoutingEvents]({}), + listener, + options, + ), + }, + pipeline: { + create: (input) => transport.request((client) => client[WS_METHODS.pipelineCreate](input)), + list: (input) => transport.request((client) => client[WS_METHODS.pipelineList](input)), + execute: (input) => transport.request((client) => client[WS_METHODS.pipelineExecute](input)), + getExecution: (input) => + transport.request((client) => client[WS_METHODS.pipelineGetExecution](input)), + cancel: (input) => transport.request((client) => client[WS_METHODS.pipelineCancel](input)), + onEvent: (listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribePipelineEvents]({}), + listener, + options, + ), + }, + workflow: { + list: (input) => transport.request((client) => client[WS_METHODS.workflowList](input)), + create: (input) => transport.request((client) => client[WS_METHODS.workflowCreate](input)), + delete: (input) => transport.request((client) => client[WS_METHODS.workflowDelete](input)), + execute: (input) 
=> transport.request((client) => client[WS_METHODS.workflowExecute](input)), + }, + task: { + decompose: (input) => transport.request((client) => client[WS_METHODS.taskDecompose](input)), + updateStatus: (input) => + transport.request((client) => client[WS_METHODS.taskUpdateStatus](input)), + getTree: (input) => transport.request((client) => client[WS_METHODS.taskGetTree](input)), + listTrees: (input) => transport.request((client) => client[WS_METHODS.taskListTrees](input)), + execute: (input) => transport.request((client) => client[WS_METHODS.taskExecute](input)), + onEvent: (listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribeTaskEvents]({}), + listener, + options, + ), + }, + memory: { + index: (input) => transport.request((client) => client[WS_METHODS.memoryIndex](input)), + search: (input) => transport.request((client) => client[WS_METHODS.memorySearch](input)), + add: (input) => transport.request((client) => client[WS_METHODS.memoryAdd](input)), + forget: (input) => transport.request((client) => client[WS_METHODS.memoryForget](input)), + list: (input) => transport.request((client) => client[WS_METHODS.memoryList](input)), + }, + presence: { + join: (input) => transport.request((client) => client[WS_METHODS.presenceJoin](input)), + leave: (input) => transport.request((client) => client[WS_METHODS.presenceLeave](input)), + updateCursor: (input) => + transport.request((client) => client[WS_METHODS.presenceUpdateCursor](input)), + share: (input) => transport.request((client) => client[WS_METHODS.presenceShare](input)), + getParticipants: (input) => + transport.request((client) => client[WS_METHODS.presenceGetParticipants](input)), + onEvent: (listener, options) => + transport.subscribe( + (client) => client[WS_METHODS.subscribePresenceEvents]({}), + listener, + options, + ), + }, }; } diff --git a/packages/contracts/src/auditLog.ts b/packages/contracts/src/auditLog.ts new file mode 100644 index 0000000000..bb5b41bd87 --- 
/dev/null +++ b/packages/contracts/src/auditLog.ts @@ -0,0 +1,93 @@ +import { Schema } from "effect"; +import { + CommandId, + EventId, + IsoDateTime, + NonNegativeInt, + ProjectId, + ThreadId, + TrimmedNonEmptyString, +} from "./baseSchemas"; + +const makeEntityId = (brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const AuditEntryId = makeEntityId("AuditEntryId"); +export type AuditEntryId = typeof AuditEntryId.Type; + +export const AuditActorKind = Schema.Literals(["user", "system", "provider"]); +export type AuditActorKind = typeof AuditActorKind.Type; + +export const AuditSeverity = Schema.Literals(["info", "warning", "critical"]); +export type AuditSeverity = typeof AuditSeverity.Type; + +export const AuditCategory = Schema.Literals([ + "session", + "command", + "approval", + "auth", + "config", + "provider", + "git", + "file", + "budget", + "pipeline", +]); +export type AuditCategory = typeof AuditCategory.Type; + +export const AuditEntry = Schema.Struct({ + id: AuditEntryId, + timestamp: IsoDateTime, + actor: AuditActorKind, + actorId: Schema.NullOr(TrimmedNonEmptyString), + category: AuditCategory, + action: TrimmedNonEmptyString, + severity: AuditSeverity, + projectId: Schema.NullOr(ProjectId), + threadId: Schema.NullOr(ThreadId), + commandId: Schema.NullOr(CommandId), + eventId: Schema.NullOr(EventId), + summary: TrimmedNonEmptyString, + detail: Schema.NullOr(Schema.String), + metadata: Schema.Record(Schema.String, Schema.Unknown), +}); +export type AuditEntry = typeof AuditEntry.Type; + +export const AuditQueryInput = Schema.Struct({ + projectId: Schema.optional(ProjectId), + threadId: Schema.optional(ThreadId), + category: Schema.optional(AuditCategory), + severity: Schema.optional(AuditSeverity), + actor: Schema.optional(AuditActorKind), + fromTimestamp: Schema.optional(IsoDateTime), + toTimestamp: Schema.optional(IsoDateTime), + limit: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 100)), + offset: 
NonNegativeInt.pipe(Schema.withDecodingDefault(() => 0)), +}); +export type AuditQueryInput = typeof AuditQueryInput.Type; + +export const AuditQueryResult = Schema.Struct({ + entries: Schema.Array(AuditEntry), + total: NonNegativeInt, + hasMore: Schema.Boolean, +}); +export type AuditQueryResult = typeof AuditQueryResult.Type; + +export const AuditExportInput = Schema.Struct({ + projectId: Schema.optional(ProjectId), + fromTimestamp: Schema.optional(IsoDateTime), + toTimestamp: Schema.optional(IsoDateTime), + format: Schema.Literals(["json", "csv"]).pipe(Schema.withDecodingDefault(() => "json" as const)), +}); +export type AuditExportInput = typeof AuditExportInput.Type; + +export const AuditStreamEvent = Schema.Struct({ + type: Schema.Literal("audit.entry"), + entry: AuditEntry, +}); +export type AuditStreamEvent = typeof AuditStreamEvent.Type; + +export class AuditLogError extends Schema.TaggedErrorClass()("AuditLogError", { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), +}) {} diff --git a/packages/contracts/src/ciIntegration.ts b/packages/contracts/src/ciIntegration.ts new file mode 100644 index 0000000000..21de34d2be --- /dev/null +++ b/packages/contracts/src/ciIntegration.ts @@ -0,0 +1,131 @@ +import { Schema } from "effect"; +import { + IsoDateTime, + NonNegativeInt, + ProjectId, + ThreadId, + TrimmedNonEmptyString, + TurnId, +} from "./baseSchemas"; + +const makeEntityId = (brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const CIRunId = makeEntityId("CIRunId"); +export type CIRunId = typeof CIRunId.Type; +export const CIJobId = makeEntityId("CIJobId"); +export type CIJobId = typeof CIJobId.Type; + +export const CIProvider = Schema.Literals(["github-actions", "gitlab-ci", "custom-webhook"]); +export type CIProvider = typeof CIProvider.Type; + +export const CIRunStatus = Schema.Literals([ + "queued", + "in_progress", + "completed", + "failed", + "cancelled", + "timed_out", +]); +export type CIRunStatus = 
typeof CIRunStatus.Type; + +export const CIConclusion = Schema.Literals([ + "success", + "failure", + "cancelled", + "skipped", + "timed_out", + "neutral", +]); +export type CIConclusion = typeof CIConclusion.Type; + +export const CIJob = Schema.Struct({ + id: CIJobId, + name: TrimmedNonEmptyString, + status: CIRunStatus, + conclusion: Schema.NullOr(CIConclusion), + startedAt: Schema.NullOr(IsoDateTime), + completedAt: Schema.NullOr(IsoDateTime), + logUrl: Schema.NullOr(Schema.String), + logExcerpt: Schema.NullOr(Schema.String), +}); +export type CIJob = typeof CIJob.Type; + +export const CIRun = Schema.Struct({ + id: CIRunId, + projectId: ProjectId, + threadId: Schema.NullOr(ThreadId), + turnId: Schema.NullOr(TurnId), + provider: CIProvider, + workflowName: TrimmedNonEmptyString, + branch: TrimmedNonEmptyString, + commitSha: TrimmedNonEmptyString, + status: CIRunStatus, + conclusion: Schema.NullOr(CIConclusion), + jobs: Schema.Array(CIJob), + htmlUrl: Schema.NullOr(Schema.String), + startedAt: IsoDateTime, + completedAt: Schema.NullOr(IsoDateTime), + updatedAt: IsoDateTime, +}); +export type CIRun = typeof CIRun.Type; + +export const CIFeedbackAction = Schema.Literals(["auto-fix", "notify", "ignore"]); +export type CIFeedbackAction = typeof CIFeedbackAction.Type; + +export const CIFeedbackPolicy = Schema.Struct({ + projectId: ProjectId, + onFailure: CIFeedbackAction.pipe(Schema.withDecodingDefault(() => "notify" as const)), + autoFixMaxAttempts: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 3)), + watchBranches: Schema.Array(TrimmedNonEmptyString), +}); +export type CIFeedbackPolicy = typeof CIFeedbackPolicy.Type; + +export const CIGetStatusInput = Schema.Struct({ + projectId: ProjectId, + threadId: Schema.optional(ThreadId), + branch: Schema.optional(TrimmedNonEmptyString), + limit: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 20)), +}); +export type CIGetStatusInput = typeof CIGetStatusInput.Type; + +export const CIGetStatusResult = 
Schema.Struct({ + runs: Schema.Array(CIRun), + hasMore: Schema.Boolean, +}); +export type CIGetStatusResult = typeof CIGetStatusResult.Type; + +export const CITriggerRerunInput = Schema.Struct({ + runId: CIRunId, + projectId: ProjectId, + failedOnly: Schema.Boolean.pipe(Schema.withDecodingDefault(() => true)), +}); +export type CITriggerRerunInput = typeof CITriggerRerunInput.Type; + +export const CISetFeedbackPolicyInput = Schema.Struct({ + projectId: ProjectId, + onFailure: CIFeedbackAction, + autoFixMaxAttempts: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 3)), + watchBranches: Schema.Array(TrimmedNonEmptyString), +}); +export type CISetFeedbackPolicyInput = typeof CISetFeedbackPolicyInput.Type; + +export const CIStreamEvent = Schema.Union([ + Schema.Struct({ type: Schema.Literal("ci.run.updated"), run: CIRun }), + Schema.Struct({ + type: Schema.Literal("ci.feedback.triggered"), + runId: CIRunId, + threadId: ThreadId, + action: CIFeedbackAction, + detail: Schema.String, + }), +]); +export type CIStreamEvent = typeof CIStreamEvent.Type; + +export class CIIntegrationError extends Schema.TaggedErrorClass()( + "CIIntegrationError", + { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) {} diff --git a/packages/contracts/src/costTracking.ts b/packages/contracts/src/costTracking.ts new file mode 100644 index 0000000000..fa71f2ce86 --- /dev/null +++ b/packages/contracts/src/costTracking.ts @@ -0,0 +1,128 @@ +import { Schema } from "effect"; +import { + IsoDateTime, + NonNegativeInt, + ProjectId, + ThreadId, + TrimmedNonEmptyString, + TurnId, +} from "./baseSchemas"; +import { ProviderKind } from "./orchestration"; + +const makeEntityId = (brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const CostEntryId = makeEntityId("CostEntryId"); +export type CostEntryId = typeof CostEntryId.Type; +export const BudgetId = makeEntityId("BudgetId"); +export type BudgetId = typeof BudgetId.Type; + +export const TokenUsage 
= Schema.Struct({ + inputTokens: NonNegativeInt, + outputTokens: NonNegativeInt, + cacheReadTokens: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 0)), + cacheWriteTokens: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 0)), + thinkingTokens: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 0)), +}); +export type TokenUsage = typeof TokenUsage.Type; + +/** Cents (USD × 100) to avoid floating-point drift. */ +export const CostCents = Schema.Int.check(Schema.isGreaterThanOrEqualTo(0)); +export type CostCents = typeof CostCents.Type; + +export const CostEntry = Schema.Struct({ + id: CostEntryId, + threadId: ThreadId, + projectId: ProjectId, + turnId: Schema.NullOr(TurnId), + provider: ProviderKind, + model: TrimmedNonEmptyString, + usage: TokenUsage, + costCents: CostCents, + createdAt: IsoDateTime, +}); +export type CostEntry = typeof CostEntry.Type; + +export const CostSummary = Schema.Struct({ + totalCostCents: CostCents, + totalInputTokens: NonNegativeInt, + totalOutputTokens: NonNegativeInt, + totalThinkingTokens: NonNegativeInt, + byProvider: Schema.Array( + Schema.Struct({ + provider: ProviderKind, + costCents: CostCents, + inputTokens: NonNegativeInt, + outputTokens: NonNegativeInt, + }), + ), + byThread: Schema.Array( + Schema.Struct({ + threadId: ThreadId, + costCents: CostCents, + }), + ), + periodStart: IsoDateTime, + periodEnd: IsoDateTime, +}); +export type CostSummary = typeof CostSummary.Type; + +export const CostBudget = Schema.Struct({ + id: BudgetId, + projectId: Schema.NullOr(ProjectId), + limitCents: CostCents, + periodDays: NonNegativeInt, + currentSpendCents: CostCents, + alertThresholdPercent: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 80)), + enabled: Schema.Boolean, + createdAt: IsoDateTime, + updatedAt: IsoDateTime, +}); +export type CostBudget = typeof CostBudget.Type; + +export const CostAlert = Schema.Struct({ + budgetId: BudgetId, + projectId: Schema.NullOr(ProjectId), + currentSpendCents: CostCents, + 
limitCents: CostCents, + percentUsed: NonNegativeInt, + alertedAt: IsoDateTime, +}); +export type CostAlert = typeof CostAlert.Type; + +export const CostGetSummaryInput = Schema.Struct({ + projectId: Schema.optional(ProjectId), + threadId: Schema.optional(ThreadId), + periodStart: Schema.optional(IsoDateTime), + periodEnd: Schema.optional(IsoDateTime), +}); +export type CostGetSummaryInput = typeof CostGetSummaryInput.Type; + +export const CostSetBudgetInput = Schema.Struct({ + budgetId: BudgetId, + projectId: Schema.NullOr(ProjectId), + limitCents: CostCents, + periodDays: NonNegativeInt, + alertThresholdPercent: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 80)), + enabled: Schema.Boolean, +}); +export type CostSetBudgetInput = typeof CostSetBudgetInput.Type; + +export const CostGetBudgetsInput = Schema.Struct({ + projectId: Schema.optional(ProjectId), +}); + +export const CostStreamEvent = Schema.Union([ + Schema.Struct({ type: Schema.Literal("cost.entry"), entry: CostEntry }), + Schema.Struct({ type: Schema.Literal("cost.alert"), alert: CostAlert }), + Schema.Struct({ type: Schema.Literal("cost.budget.updated"), budget: CostBudget }), +]); +export type CostStreamEvent = typeof CostStreamEvent.Type; + +export class CostTrackingError extends Schema.TaggedErrorClass()( + "CostTrackingError", + { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) {} diff --git a/packages/contracts/src/index.ts b/packages/contracts/src/index.ts index c60856bbe5..3e6342659a 100644 --- a/packages/contracts/src/index.ts +++ b/packages/contracts/src/index.ts @@ -12,3 +12,12 @@ export * from "./orchestration"; export * from "./editor"; export * from "./project"; export * from "./rpc"; +export * from "./costTracking"; +export * from "./auditLog"; +export * from "./ciIntegration"; +export * from "./routing"; +export * from "./pipelines"; +export * from "./workflows"; +export * from "./taskDecomposition"; +export * from "./projectMemory"; +export * from 
"./presence"; diff --git a/packages/contracts/src/orchestration.ts b/packages/contracts/src/orchestration.ts index 247c86ac15..5210af3233 100644 --- a/packages/contracts/src/orchestration.ts +++ b/packages/contracts/src/orchestration.ts @@ -757,6 +757,16 @@ const ThreadActivityAppendCommand = Schema.Struct({ createdAt: IsoDateTime, }); +const ThreadBranchFromCheckpointCommand = Schema.Struct({ + type: Schema.Literal("thread.branch-from-checkpoint"), + commandId: CommandId, + sourceThreadId: ThreadId, + newThreadId: ThreadId, + checkpointTurnCount: NonNegativeInt, + title: TrimmedNonEmptyString, + createdAt: IsoDateTime, +}); + const ThreadRevertCompleteCommand = Schema.Struct({ type: Schema.Literal("thread.revert.complete"), commandId: CommandId, @@ -773,6 +783,7 @@ const InternalOrchestrationCommand = Schema.Union([ ThreadTurnDiffCompleteCommand, ThreadActivityAppendCommand, ThreadRevertCompleteCommand, + ThreadBranchFromCheckpointCommand, ]); export type InternalOrchestrationCommand = typeof InternalOrchestrationCommand.Type; @@ -805,6 +816,7 @@ export const OrchestrationEventType = Schema.Literals([ "thread.proposed-plan-upserted", "thread.turn-diff-completed", "thread.activity-appended", + "thread.branched-from-checkpoint", ]); export type OrchestrationEventType = typeof OrchestrationEventType.Type; @@ -977,6 +989,14 @@ export const ThreadActivityAppendedPayload = Schema.Struct({ activity: OrchestrationThreadActivity, }); +export const ThreadBranchedFromCheckpointPayload = Schema.Struct({ + sourceThreadId: ThreadId, + newThreadId: ThreadId, + checkpointTurnCount: NonNegativeInt, + title: TrimmedNonEmptyString, + createdAt: IsoDateTime, +}); + export const OrchestrationEventMetadata = Schema.Struct({ providerTurnId: Schema.optional(TrimmedNonEmptyString), providerItemId: Schema.optional(ProviderItemId), @@ -1109,6 +1129,11 @@ export const OrchestrationEvent = Schema.Union([ type: Schema.Literal("thread.activity-appended"), payload: ThreadActivityAppendedPayload, 
}), + Schema.Struct({ + ...EventBaseFields, + type: Schema.Literal("thread.branched-from-checkpoint"), + payload: ThreadBranchedFromCheckpointPayload, + }), ]); export type OrchestrationEvent = typeof OrchestrationEvent.Type; diff --git a/packages/contracts/src/pipelines.ts b/packages/contracts/src/pipelines.ts new file mode 100644 index 0000000000..402b6a7c9b --- /dev/null +++ b/packages/contracts/src/pipelines.ts @@ -0,0 +1,149 @@ +import { Schema } from "effect"; +import { + IsoDateTime, + NonNegativeInt, + ProjectId, + ThreadId, + TrimmedNonEmptyString, +} from "./baseSchemas"; +import { ModelSelection, ProviderKind, RuntimeMode } from "./orchestration"; + +const makeEntityId = (brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const PipelineId = makeEntityId("PipelineId"); +export type PipelineId = typeof PipelineId.Type; +export const PipelineStageId = makeEntityId("PipelineStageId"); +export type PipelineStageId = typeof PipelineStageId.Type; +export const PipelineExecutionId = makeEntityId("PipelineExecutionId"); +export type PipelineExecutionId = typeof PipelineExecutionId.Type; + +export const PipelineStageKind = Schema.Literals([ + "agent-task", + "review", + "test", + "gate", + "parallel-fan-out", +]); +export type PipelineStageKind = typeof PipelineStageKind.Type; + +export const PipelineStageStatus = Schema.Literals([ + "pending", + "running", + "completed", + "failed", + "skipped", + "waiting-approval", +]); +export type PipelineStageStatus = typeof PipelineStageStatus.Type; + +export const PipelineStage = Schema.Struct({ + id: PipelineStageId, + name: TrimmedNonEmptyString, + kind: PipelineStageKind, + provider: Schema.optional(ProviderKind), + modelSelection: Schema.optional(ModelSelection), + prompt: TrimmedNonEmptyString, + dependsOn: Schema.Array(PipelineStageId), + runtimeMode: Schema.optional(RuntimeMode), + timeoutMs: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 300_000)), + retryOnFailure: 
Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), + maxRetries: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 1)), +}); +export type PipelineStage = typeof PipelineStage.Type; + +export const PipelineDefinition = Schema.Struct({ + id: PipelineId, + name: TrimmedNonEmptyString, + description: Schema.NullOr(Schema.String), + projectId: ProjectId, + stages: Schema.Array(PipelineStage), + createdAt: IsoDateTime, + updatedAt: IsoDateTime, +}); +export type PipelineDefinition = typeof PipelineDefinition.Type; + +export const PipelineExecutionStatus = Schema.Literals([ + "pending", + "running", + "completed", + "failed", + "cancelled", +]); +export type PipelineExecutionStatus = typeof PipelineExecutionStatus.Type; + +export const PipelineStageExecution = Schema.Struct({ + stageId: PipelineStageId, + status: PipelineStageStatus, + threadId: Schema.NullOr(ThreadId), + startedAt: Schema.NullOr(IsoDateTime), + completedAt: Schema.NullOr(IsoDateTime), + error: Schema.NullOr(Schema.String), + retryCount: NonNegativeInt, + output: Schema.NullOr(Schema.String), +}); +export type PipelineStageExecution = typeof PipelineStageExecution.Type; + +export const PipelineExecution = Schema.Struct({ + id: PipelineExecutionId, + pipelineId: PipelineId, + projectId: ProjectId, + status: PipelineExecutionStatus, + stages: Schema.Array(PipelineStageExecution), + startedAt: IsoDateTime, + completedAt: Schema.NullOr(IsoDateTime), + updatedAt: IsoDateTime, +}); +export type PipelineExecution = typeof PipelineExecution.Type; + +export const PipelineCreateInput = Schema.Struct({ + name: TrimmedNonEmptyString, + description: Schema.NullOr(Schema.String), + projectId: ProjectId, + stages: Schema.Array(PipelineStage), +}); +export type PipelineCreateInput = typeof PipelineCreateInput.Type; + +export const PipelineExecuteInput = Schema.Struct({ + pipelineId: PipelineId, + projectId: ProjectId, +}); +export type PipelineExecuteInput = typeof PipelineExecuteInput.Type; + +export const 
PipelineListInput = Schema.Struct({ + projectId: ProjectId, +}); +export type PipelineListInput = typeof PipelineListInput.Type; + +export const PipelineListResult = Schema.Struct({ + pipelines: Schema.Array(PipelineDefinition), +}); +export type PipelineListResult = typeof PipelineListResult.Type; + +export const PipelineGetExecutionInput = Schema.Struct({ + executionId: PipelineExecutionId, +}); +export type PipelineGetExecutionInput = typeof PipelineGetExecutionInput.Type; + +export const PipelineCancelInput = Schema.Struct({ + executionId: PipelineExecutionId, +}); +export type PipelineCancelInput = typeof PipelineCancelInput.Type; + +export const PipelineStreamEvent = Schema.Union([ + Schema.Struct({ + type: Schema.Literal("pipeline.execution.updated"), + execution: PipelineExecution, + }), + Schema.Struct({ + type: Schema.Literal("pipeline.stage.updated"), + executionId: PipelineExecutionId, + stage: PipelineStageExecution, + }), +]); +export type PipelineStreamEvent = typeof PipelineStreamEvent.Type; + +export class PipelineError extends Schema.TaggedErrorClass<PipelineError>()("PipelineError", { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), +}) {} diff --git a/packages/contracts/src/presence.ts b/packages/contracts/src/presence.ts new file mode 100644 index 0000000000..ed662e1165 --- /dev/null +++ b/packages/contracts/src/presence.ts @@ -0,0 +1,103 @@ +import { Schema } from "effect"; +import { IsoDateTime, NonNegativeInt, ThreadId, TrimmedNonEmptyString } from "./baseSchemas"; + +const makeEntityId = <Brand extends string>(brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const SessionShareId = makeEntityId("SessionShareId"); +export type SessionShareId = typeof SessionShareId.Type; +export const ParticipantId = makeEntityId("ParticipantId"); +export type ParticipantId = typeof ParticipantId.Type; + +export const ParticipantRole = Schema.Literals(["owner", "collaborator", "viewer"]); +export type ParticipantRole = typeof ParticipantRole.Type; 
+ +export const PresenceCursorKind = Schema.Literals(["viewing", "typing", "idle"]); +export type PresenceCursorKind = typeof PresenceCursorKind.Type; + +export const Participant = Schema.Struct({ + id: ParticipantId, + displayName: TrimmedNonEmptyString, + role: ParticipantRole, + color: TrimmedNonEmptyString, + cursor: PresenceCursorKind.pipe(Schema.withDecodingDefault(() => "idle" as const)), + activeThreadId: Schema.NullOr(ThreadId), + connectedAt: IsoDateTime, + lastSeenAt: IsoDateTime, +}); +export type Participant = typeof Participant.Type; + +export const SessionShare = Schema.Struct({ + id: SessionShareId, + threadId: ThreadId, + ownerId: ParticipantId, + participants: Schema.Array(Participant), + maxParticipants: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 10)), + isPublic: Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), + createdAt: IsoDateTime, + expiresAt: Schema.NullOr(IsoDateTime), +}); +export type SessionShare = typeof SessionShare.Type; + +export const PresenceJoinInput = Schema.Struct({ + threadId: ThreadId, + displayName: TrimmedNonEmptyString, + role: ParticipantRole.pipe(Schema.withDecodingDefault(() => "viewer" as const)), +}); +export type PresenceJoinInput = typeof PresenceJoinInput.Type; + +export const PresenceLeaveInput = Schema.Struct({ + threadId: ThreadId, + participantId: ParticipantId, +}); +export type PresenceLeaveInput = typeof PresenceLeaveInput.Type; + +export const PresenceUpdateCursorInput = Schema.Struct({ + threadId: ThreadId, + participantId: ParticipantId, + cursor: PresenceCursorKind, +}); +export type PresenceUpdateCursorInput = typeof PresenceUpdateCursorInput.Type; + +export const PresenceShareInput = Schema.Struct({ + threadId: ThreadId, + isPublic: Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), + maxParticipants: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 10)), +}); +export type PresenceShareInput = typeof PresenceShareInput.Type; + +export const 
PresenceGetParticipantsInput = Schema.Struct({ + threadId: ThreadId, +}); +export type PresenceGetParticipantsInput = typeof PresenceGetParticipantsInput.Type; + +export const PresenceGetParticipantsResult = Schema.Struct({ + participants: Schema.Array(Participant), + share: Schema.NullOr(SessionShare), +}); +export type PresenceGetParticipantsResult = typeof PresenceGetParticipantsResult.Type; + +export const PresenceStreamEvent = Schema.Union([ + Schema.Struct({ type: Schema.Literal("presence.joined"), participant: Participant }), + Schema.Struct({ + type: Schema.Literal("presence.left"), + participantId: ParticipantId, + threadId: ThreadId, + }), + Schema.Struct({ + type: Schema.Literal("presence.cursor.updated"), + participantId: ParticipantId, + cursor: PresenceCursorKind, + threadId: ThreadId, + }), + Schema.Struct({ + type: Schema.Literal("presence.share.created"), + share: SessionShare, + }), +]); +export type PresenceStreamEvent = typeof PresenceStreamEvent.Type; + +export class PresenceError extends Schema.TaggedErrorClass<PresenceError>()("PresenceError", { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), +}) {} diff --git a/packages/contracts/src/projectMemory.ts b/packages/contracts/src/projectMemory.ts new file mode 100644 index 0000000000..f488b2d826 --- /dev/null +++ b/packages/contracts/src/projectMemory.ts @@ -0,0 +1,117 @@ +import { Schema } from "effect"; +import { + IsoDateTime, + NonNegativeInt, + ProjectId, + ThreadId, + TrimmedNonEmptyString, +} from "./baseSchemas"; + +const makeEntityId = <Brand extends string>(brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const MemoryEntryId = makeEntityId("MemoryEntryId"); +export type MemoryEntryId = typeof MemoryEntryId.Type; + +export const MemoryKind = Schema.Literals([ + "architectural-decision", + "code-pattern", + "bug-fix", + "convention", + "dependency-note", + "session-insight", + "file-summary", + "custom", +]); +export type MemoryKind = typeof MemoryKind.Type; + +export const 
MemoryEntry = Schema.Struct({ + id: MemoryEntryId, + projectId: ProjectId, + threadId: Schema.NullOr(ThreadId), + kind: MemoryKind, + title: TrimmedNonEmptyString, + content: TrimmedNonEmptyString, + tags: Schema.Array(TrimmedNonEmptyString), + relevanceScore: Schema.Number.check( + Schema.isGreaterThanOrEqualTo(0), + Schema.isLessThanOrEqualTo(1), + ).pipe(Schema.withDecodingDefault(() => 0.5)), + accessCount: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 0)), + createdAt: IsoDateTime, + updatedAt: IsoDateTime, + expiresAt: Schema.NullOr(IsoDateTime), +}); +export type MemoryEntry = typeof MemoryEntry.Type; + +export const MemorySearchResult = Schema.Struct({ + entry: MemoryEntry, + matchScore: Schema.Number, + matchSnippet: Schema.NullOr(Schema.String), +}); +export type MemorySearchResult = typeof MemorySearchResult.Type; + +export const MemoryIndexInput = Schema.Struct({ + projectId: ProjectId, + forceReindex: Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), +}); +export type MemoryIndexInput = typeof MemoryIndexInput.Type; + +export const MemoryIndexResult = Schema.Struct({ + entriesIndexed: NonNegativeInt, + duration: NonNegativeInt, +}); +export type MemoryIndexResult = typeof MemoryIndexResult.Type; + +export const MemorySearchInput = Schema.Struct({ + projectId: ProjectId, + query: TrimmedNonEmptyString, + kind: Schema.optional(MemoryKind), + tags: Schema.optional(Schema.Array(TrimmedNonEmptyString)), + limit: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 10)), +}); +export type MemorySearchInput = typeof MemorySearchInput.Type; + +export const MemorySearchOutput = Schema.Struct({ + results: Schema.Array(MemorySearchResult), + queryTime: NonNegativeInt, +}); +export type MemorySearchOutput = typeof MemorySearchOutput.Type; + +export const MemoryAddInput = Schema.Struct({ + projectId: ProjectId, + threadId: Schema.optional(ThreadId), + kind: MemoryKind, + title: TrimmedNonEmptyString, + content: TrimmedNonEmptyString, + tags: 
Schema.Array(TrimmedNonEmptyString), + expiresAt: Schema.optional(IsoDateTime), +}); +export type MemoryAddInput = typeof MemoryAddInput.Type; + +export const MemoryForgetInput = Schema.Struct({ + entryId: MemoryEntryId, +}); +export type MemoryForgetInput = typeof MemoryForgetInput.Type; + +export const MemoryListInput = Schema.Struct({ + projectId: ProjectId, + kind: Schema.optional(MemoryKind), + limit: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 50)), + offset: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 0)), +}); +export type MemoryListInput = typeof MemoryListInput.Type; + +export const MemoryListResult = Schema.Struct({ + entries: Schema.Array(MemoryEntry), + total: NonNegativeInt, +}); +export type MemoryListResult = typeof MemoryListResult.Type; + +export class ProjectMemoryError extends Schema.TaggedErrorClass<ProjectMemoryError>()( + "ProjectMemoryError", + { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) {} diff --git a/packages/contracts/src/routing.ts b/packages/contracts/src/routing.ts new file mode 100644 index 0000000000..b9e16a4733 --- /dev/null +++ b/packages/contracts/src/routing.ts @@ -0,0 +1,104 @@ +import { Schema } from "effect"; +import { IsoDateTime, NonNegativeInt, ProjectId, TrimmedNonEmptyString } from "./baseSchemas"; +import { ProviderKind } from "./orchestration"; + +export const RoutingStrategyKind = Schema.Literals([ + "round-robin", + "cost-optimized", + "latency-optimized", + "capability-match", + "manual", +]); +export type RoutingStrategyKind = typeof RoutingStrategyKind.Type; + +export const FailoverTrigger = Schema.Literals([ + "error", + "timeout", + "rate-limit", + "budget-exceeded", +]); +export type FailoverTrigger = typeof FailoverTrigger.Type; + +export const ProviderHealthStatus = Schema.Literals(["healthy", "degraded", "down", "unknown"]); +export type ProviderHealthStatus = typeof ProviderHealthStatus.Type; + +export const ProviderHealth = Schema.Struct({ + provider: ProviderKind, + status: 
ProviderHealthStatus, + latencyMs: Schema.NullOr(NonNegativeInt), + errorRate: Schema.Number.check(Schema.isGreaterThanOrEqualTo(0), Schema.isLessThanOrEqualTo(1)), + lastCheckedAt: IsoDateTime, + lastErrorAt: Schema.NullOr(IsoDateTime), + consecutiveFailures: NonNegativeInt, +}); +export type ProviderHealth = typeof ProviderHealth.Type; + +export const FailoverPolicy = Schema.Struct({ + projectId: Schema.NullOr(ProjectId), + triggers: Schema.Array(FailoverTrigger), + fallbackChain: Schema.Array(ProviderKind), + maxRetries: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 2)), + retryDelayMs: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 1000)), + enabled: Schema.Boolean, +}); +export type FailoverPolicy = typeof FailoverPolicy.Type; + +export const RoutingRule = Schema.Struct({ + name: TrimmedNonEmptyString, + projectId: Schema.NullOr(ProjectId), + strategy: RoutingStrategyKind, + preferredProviders: Schema.Array(ProviderKind), + excludedProviders: Schema.Array(ProviderKind), + taskPatterns: Schema.Array(TrimmedNonEmptyString), + failoverPolicy: FailoverPolicy, + priority: NonNegativeInt, +}); +export type RoutingRule = typeof RoutingRule.Type; + +export const RoutingDecision = Schema.Struct({ + selectedProvider: ProviderKind, + reason: TrimmedNonEmptyString, + alternatives: Schema.Array(ProviderKind), + failoverAttempt: NonNegativeInt, + decidedAt: IsoDateTime, +}); +export type RoutingDecision = typeof RoutingDecision.Type; + +export const RoutingGetHealthInput = Schema.Struct({}); + +export const RoutingGetHealthResult = Schema.Struct({ + providers: Schema.Array(ProviderHealth), + updatedAt: IsoDateTime, +}); +export type RoutingGetHealthResult = typeof RoutingGetHealthResult.Type; + +export const RoutingSetRulesInput = Schema.Struct({ + rules: Schema.Array(RoutingRule), +}); +export type RoutingSetRulesInput = typeof RoutingSetRulesInput.Type; + +export const RoutingGetRulesResult = Schema.Struct({ + rules: Schema.Array(RoutingRule), +}); +export 
type RoutingGetRulesResult = typeof RoutingGetRulesResult.Type; + +export const RoutingStreamEvent = Schema.Union([ + Schema.Struct({ type: Schema.Literal("routing.health.updated"), health: ProviderHealth }), + Schema.Struct({ type: Schema.Literal("routing.decision"), decision: RoutingDecision }), + Schema.Struct({ + type: Schema.Literal("routing.failover"), + fromProvider: ProviderKind, + toProvider: ProviderKind, + trigger: FailoverTrigger, + detail: Schema.String, + }), +]); +export type RoutingStreamEvent = typeof RoutingStreamEvent.Type; + +export class ProviderRoutingError extends Schema.TaggedErrorClass<ProviderRoutingError>()( + "ProviderRoutingError", + { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) {} diff --git a/packages/contracts/src/rpc.ts b/packages/contracts/src/rpc.ts index 34968e66ec..c8d072132e 100644 --- a/packages/contracts/src/rpc.ts +++ b/packages/contracts/src/rpc.ts @@ -2,6 +2,25 @@ import { Schema } from "effect"; import * as Rpc from "effect/unstable/rpc/Rpc"; import * as RpcGroup from "effect/unstable/rpc/RpcGroup"; +import { AuditLogError, AuditQueryInput, AuditQueryResult, AuditStreamEvent } from "./auditLog"; +import { + CIGetStatusInput, + CIGetStatusResult, + CIIntegrationError, + CISetFeedbackPolicyInput, + CIFeedbackPolicy, + CIStreamEvent, + CITriggerRerunInput, +} from "./ciIntegration"; +import { + CostBudget, + CostGetBudgetsInput, + CostGetSummaryInput, + CostSetBudgetInput, + CostStreamEvent, + CostSummary, + CostTrackingError, +} from "./costTracking"; import { OpenError, OpenInEditorInput } from "./editor"; import { GitActionProgressEvent, @@ -60,6 +79,50 @@ import { TerminalSessionSnapshot, TerminalWriteInput, } from "./terminal"; +import { + MemoryAddInput, + MemoryEntry, + MemoryForgetInput, + MemoryIndexInput, + MemoryIndexResult, + MemoryListInput, + MemoryListResult, + MemorySearchInput, + MemorySearchOutput, + ProjectMemoryError, +} from "./projectMemory"; +import { + PipelineCreateInput, + 
PipelineDefinition, + PipelineError, + PipelineExecution, + PipelineExecuteInput, + PipelineGetExecutionInput, + PipelineListInput, + PipelineListResult, + PipelineCancelInput, + PipelineStreamEvent, +} from "./pipelines"; +import { + PresenceError, + PresenceGetParticipantsInput, + PresenceGetParticipantsResult, + PresenceJoinInput, + PresenceLeaveInput, + PresenceShareInput, + PresenceStreamEvent, + PresenceUpdateCursorInput, + SessionShare, + Participant, +} from "./presence"; +import { + ProviderRoutingError, + RoutingGetHealthInput, + RoutingGetHealthResult, + RoutingGetRulesResult, + RoutingSetRulesInput, + RoutingStreamEvent, +} from "./routing"; import { ServerConfigStreamEvent, ServerConfig, @@ -69,6 +132,26 @@ import { ServerUpsertKeybindingResult, } from "./server"; import { ServerSettings, ServerSettingsError, ServerSettingsPatch } from "./settings"; +import { + TaskDecomposeInput, + TaskDecompositionError, + TaskExecuteInput, + TaskGetTreeInput, + TaskListTreesInput, + TaskListTreesResult, + TaskStreamEvent, + TaskTree, + TaskUpdateStatusInput, +} from "./taskDecomposition"; +import { + WorkflowCreateInput, + WorkflowDeleteInput, + WorkflowError, + WorkflowExecuteInput, + WorkflowListInput, + WorkflowListResult, + WorkflowTemplate, +} from "./workflows"; export const WS_METHODS = { // Project registry methods @@ -109,11 +192,70 @@ export const WS_METHODS = { serverGetSettings: "server.getSettings", serverUpdateSettings: "server.updateSettings", + // Cost tracking + costGetSummary: "cost.getSummary", + costSetBudget: "cost.setBudget", + costGetBudgets: "cost.getBudgets", + + // Audit log + auditQuery: "audit.query", + + // CI/CD integration + ciGetStatus: "ci.getStatus", + ciTriggerRerun: "ci.triggerRerun", + ciSetFeedbackPolicy: "ci.setFeedbackPolicy", + + // Provider routing + routingGetHealth: "routing.getHealth", + routingSetRules: "routing.setRules", + routingGetRules: "routing.getRules", + + // Pipelines + pipelineCreate: "pipeline.create", + 
pipelineList: "pipeline.list", + pipelineExecute: "pipeline.execute", + pipelineGetExecution: "pipeline.getExecution", + pipelineCancel: "pipeline.cancel", + + // Workflows + workflowList: "workflow.list", + workflowCreate: "workflow.create", + workflowDelete: "workflow.delete", + workflowExecute: "workflow.execute", + + // Task decomposition + taskDecompose: "task.decompose", + taskUpdateStatus: "task.updateStatus", + taskGetTree: "task.getTree", + taskListTrees: "task.listTrees", + taskExecute: "task.execute", + + // Project memory + memoryIndex: "memory.index", + memorySearch: "memory.search", + memoryAdd: "memory.add", + memoryForget: "memory.forget", + memoryList: "memory.list", + + // Presence + presenceJoin: "presence.join", + presenceLeave: "presence.leave", + presenceUpdateCursor: "presence.updateCursor", + presenceShare: "presence.share", + presenceGetParticipants: "presence.getParticipants", + // Streaming subscriptions subscribeOrchestrationDomainEvents: "subscribeOrchestrationDomainEvents", subscribeTerminalEvents: "subscribeTerminalEvents", subscribeServerConfig: "subscribeServerConfig", subscribeServerLifecycle: "subscribeServerLifecycle", + subscribeCostEvents: "subscribeCostEvents", + subscribeAuditEvents: "subscribeAuditEvents", + subscribeCIEvents: "subscribeCIEvents", + subscribeRoutingEvents: "subscribeRoutingEvents", + subscribePipelineEvents: "subscribePipelineEvents", + subscribeTaskEvents: "subscribeTaskEvents", + subscribePresenceEvents: "subscribePresenceEvents", } as const; export const WsServerUpsertKeybindingRpc = Rpc.make(WS_METHODS.serverUpsertKeybinding, { @@ -321,6 +463,222 @@ export const WsSubscribeServerLifecycleRpc = Rpc.make(WS_METHODS.subscribeServer stream: true, }); +// --- Cost Tracking RPCs --- +export const WsCostGetSummaryRpc = Rpc.make(WS_METHODS.costGetSummary, { + payload: CostGetSummaryInput, + success: CostSummary, + error: CostTrackingError, +}); +export const WsCostSetBudgetRpc = 
Rpc.make(WS_METHODS.costSetBudget, { + payload: CostSetBudgetInput, + success: CostBudget, + error: CostTrackingError, +}); +export const WsCostGetBudgetsRpc = Rpc.make(WS_METHODS.costGetBudgets, { + payload: CostGetBudgetsInput, + success: Schema.Struct({ budgets: Schema.Array(CostBudget) }), + error: CostTrackingError, +}); +export const WsSubscribeCostEventsRpc = Rpc.make(WS_METHODS.subscribeCostEvents, { + payload: Schema.Struct({}), + success: CostStreamEvent, + stream: true, +}); + +// --- Audit Log RPCs --- +export const WsAuditQueryRpc = Rpc.make(WS_METHODS.auditQuery, { + payload: AuditQueryInput, + success: AuditQueryResult, + error: AuditLogError, +}); +export const WsSubscribeAuditEventsRpc = Rpc.make(WS_METHODS.subscribeAuditEvents, { + payload: Schema.Struct({}), + success: AuditStreamEvent, + stream: true, +}); + +// --- CI/CD RPCs --- +export const WsCIGetStatusRpc = Rpc.make(WS_METHODS.ciGetStatus, { + payload: CIGetStatusInput, + success: CIGetStatusResult, + error: CIIntegrationError, +}); +export const WsCITriggerRerunRpc = Rpc.make(WS_METHODS.ciTriggerRerun, { + payload: CITriggerRerunInput, + error: CIIntegrationError, +}); +export const WsCISetFeedbackPolicyRpc = Rpc.make(WS_METHODS.ciSetFeedbackPolicy, { + payload: CISetFeedbackPolicyInput, + success: CIFeedbackPolicy, + error: CIIntegrationError, +}); +export const WsSubscribeCIEventsRpc = Rpc.make(WS_METHODS.subscribeCIEvents, { + payload: Schema.Struct({}), + success: CIStreamEvent, + stream: true, +}); + +// --- Routing RPCs --- +export const WsRoutingGetHealthRpc = Rpc.make(WS_METHODS.routingGetHealth, { + payload: RoutingGetHealthInput, + success: RoutingGetHealthResult, + error: ProviderRoutingError, +}); +export const WsRoutingSetRulesRpc = Rpc.make(WS_METHODS.routingSetRules, { + payload: RoutingSetRulesInput, + success: RoutingGetRulesResult, + error: ProviderRoutingError, +}); +export const WsRoutingGetRulesRpc = Rpc.make(WS_METHODS.routingGetRules, { + payload: Schema.Struct({}), 
+ success: RoutingGetRulesResult, + error: ProviderRoutingError, +}); +export const WsSubscribeRoutingEventsRpc = Rpc.make(WS_METHODS.subscribeRoutingEvents, { + payload: Schema.Struct({}), + success: RoutingStreamEvent, + stream: true, +}); + +// --- Pipeline RPCs --- +export const WsPipelineCreateRpc = Rpc.make(WS_METHODS.pipelineCreate, { + payload: PipelineCreateInput, + success: PipelineDefinition, + error: PipelineError, +}); +export const WsPipelineListRpc = Rpc.make(WS_METHODS.pipelineList, { + payload: PipelineListInput, + success: PipelineListResult, + error: PipelineError, +}); +export const WsPipelineExecuteRpc = Rpc.make(WS_METHODS.pipelineExecute, { + payload: PipelineExecuteInput, + success: PipelineExecution, + error: PipelineError, +}); +export const WsPipelineGetExecutionRpc = Rpc.make(WS_METHODS.pipelineGetExecution, { + payload: PipelineGetExecutionInput, + success: PipelineExecution, + error: PipelineError, +}); +export const WsPipelineCancelRpc = Rpc.make(WS_METHODS.pipelineCancel, { + payload: PipelineCancelInput, + error: PipelineError, +}); +export const WsSubscribePipelineEventsRpc = Rpc.make(WS_METHODS.subscribePipelineEvents, { + payload: Schema.Struct({}), + success: PipelineStreamEvent, + stream: true, +}); + +// --- Workflow RPCs --- +export const WsWorkflowListRpc = Rpc.make(WS_METHODS.workflowList, { + payload: WorkflowListInput, + success: WorkflowListResult, + error: WorkflowError, +}); +export const WsWorkflowCreateRpc = Rpc.make(WS_METHODS.workflowCreate, { + payload: WorkflowCreateInput, + success: WorkflowTemplate, + error: WorkflowError, +}); +export const WsWorkflowDeleteRpc = Rpc.make(WS_METHODS.workflowDelete, { + payload: WorkflowDeleteInput, + error: WorkflowError, +}); +export const WsWorkflowExecuteRpc = Rpc.make(WS_METHODS.workflowExecute, { + payload: WorkflowExecuteInput, + error: WorkflowError, +}); + +// --- Task Decomposition RPCs --- +export const WsTaskDecomposeRpc = Rpc.make(WS_METHODS.taskDecompose, { + 
payload: TaskDecomposeInput, + success: TaskTree, + error: TaskDecompositionError, +}); +export const WsTaskUpdateStatusRpc = Rpc.make(WS_METHODS.taskUpdateStatus, { + payload: TaskUpdateStatusInput, + success: TaskTree, + error: TaskDecompositionError, +}); +export const WsTaskGetTreeRpc = Rpc.make(WS_METHODS.taskGetTree, { + payload: TaskGetTreeInput, + success: TaskTree, + error: TaskDecompositionError, +}); +export const WsTaskListTreesRpc = Rpc.make(WS_METHODS.taskListTrees, { + payload: TaskListTreesInput, + success: TaskListTreesResult, + error: TaskDecompositionError, +}); +export const WsTaskExecuteRpc = Rpc.make(WS_METHODS.taskExecute, { + payload: TaskExecuteInput, + success: TaskTree, + error: TaskDecompositionError, +}); +export const WsSubscribeTaskEventsRpc = Rpc.make(WS_METHODS.subscribeTaskEvents, { + payload: Schema.Struct({}), + success: TaskStreamEvent, + stream: true, +}); + +// --- Memory RPCs --- +export const WsMemoryIndexRpc = Rpc.make(WS_METHODS.memoryIndex, { + payload: MemoryIndexInput, + success: MemoryIndexResult, + error: ProjectMemoryError, +}); +export const WsMemorySearchRpc = Rpc.make(WS_METHODS.memorySearch, { + payload: MemorySearchInput, + success: MemorySearchOutput, + error: ProjectMemoryError, +}); +export const WsMemoryAddRpc = Rpc.make(WS_METHODS.memoryAdd, { + payload: MemoryAddInput, + success: MemoryEntry, + error: ProjectMemoryError, +}); +export const WsMemoryForgetRpc = Rpc.make(WS_METHODS.memoryForget, { + payload: MemoryForgetInput, + error: ProjectMemoryError, +}); +export const WsMemoryListRpc = Rpc.make(WS_METHODS.memoryList, { + payload: MemoryListInput, + success: MemoryListResult, + error: ProjectMemoryError, +}); + +// --- Presence RPCs --- +export const WsPresenceJoinRpc = Rpc.make(WS_METHODS.presenceJoin, { + payload: PresenceJoinInput, + success: Participant, + error: PresenceError, +}); +export const WsPresenceLeaveRpc = Rpc.make(WS_METHODS.presenceLeave, { + payload: PresenceLeaveInput, + error: 
PresenceError, +}); +export const WsPresenceUpdateCursorRpc = Rpc.make(WS_METHODS.presenceUpdateCursor, { + payload: PresenceUpdateCursorInput, + error: PresenceError, +}); +export const WsPresenceShareRpc = Rpc.make(WS_METHODS.presenceShare, { + payload: PresenceShareInput, + success: SessionShare, + error: PresenceError, +}); +export const WsPresenceGetParticipantsRpc = Rpc.make(WS_METHODS.presenceGetParticipants, { + payload: PresenceGetParticipantsInput, + success: PresenceGetParticipantsResult, + error: PresenceError, +}); +export const WsSubscribePresenceEventsRpc = Rpc.make(WS_METHODS.subscribePresenceEvents, { + payload: Schema.Struct({}), + success: PresenceStreamEvent, + stream: true, +}); + export const WsRpcGroup = RpcGroup.make( WsServerGetConfigRpc, WsServerRefreshProvidersRpc, @@ -356,4 +714,54 @@ export const WsRpcGroup = RpcGroup.make( WsOrchestrationGetTurnDiffRpc, WsOrchestrationGetFullThreadDiffRpc, WsOrchestrationReplayEventsRpc, + // Cost tracking + WsCostGetSummaryRpc, + WsCostSetBudgetRpc, + WsCostGetBudgetsRpc, + WsSubscribeCostEventsRpc, + // Audit log + WsAuditQueryRpc, + WsSubscribeAuditEventsRpc, + // CI/CD + WsCIGetStatusRpc, + WsCITriggerRerunRpc, + WsCISetFeedbackPolicyRpc, + WsSubscribeCIEventsRpc, + // Routing + WsRoutingGetHealthRpc, + WsRoutingSetRulesRpc, + WsRoutingGetRulesRpc, + WsSubscribeRoutingEventsRpc, + // Pipelines + WsPipelineCreateRpc, + WsPipelineListRpc, + WsPipelineExecuteRpc, + WsPipelineGetExecutionRpc, + WsPipelineCancelRpc, + WsSubscribePipelineEventsRpc, + // Workflows + WsWorkflowListRpc, + WsWorkflowCreateRpc, + WsWorkflowDeleteRpc, + WsWorkflowExecuteRpc, + // Tasks + WsTaskDecomposeRpc, + WsTaskUpdateStatusRpc, + WsTaskGetTreeRpc, + WsTaskListTreesRpc, + WsTaskExecuteRpc, + WsSubscribeTaskEventsRpc, + // Memory + WsMemoryIndexRpc, + WsMemorySearchRpc, + WsMemoryAddRpc, + WsMemoryForgetRpc, + WsMemoryListRpc, + // Presence + WsPresenceJoinRpc, + WsPresenceLeaveRpc, + WsPresenceUpdateCursorRpc, + 
WsPresenceShareRpc, + WsPresenceGetParticipantsRpc, + WsSubscribePresenceEventsRpc, ); diff --git a/packages/contracts/src/taskDecomposition.ts b/packages/contracts/src/taskDecomposition.ts new file mode 100644 index 0000000000..e682c34024 --- /dev/null +++ b/packages/contracts/src/taskDecomposition.ts @@ -0,0 +1,117 @@ +import { Schema } from "effect"; +import { + IsoDateTime, + NonNegativeInt, + ProjectId, + ThreadId, + TrimmedNonEmptyString, +} from "./baseSchemas"; +import { ProviderKind } from "./orchestration"; + +const makeEntityId = <Brand extends string>(brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const TaskId = makeEntityId("TaskId"); +export type TaskId = typeof TaskId.Type; +export const TaskTreeId = makeEntityId("TaskTreeId"); +export type TaskTreeId = typeof TaskTreeId.Type; + +export const TaskStatus = Schema.Literals([ + "pending", + "in-progress", + "completed", + "failed", + "blocked", + "skipped", +]); +export type TaskStatus = typeof TaskStatus.Type; + +export const TaskPriority = Schema.Literals(["low", "medium", "high", "critical"]); +export type TaskPriority = typeof TaskPriority.Type; + +export const TaskComplexity = Schema.Literals(["trivial", "simple", "moderate", "complex"]); +export type TaskComplexity = typeof TaskComplexity.Type; + +export const TaskNode = Schema.Struct({ + id: TaskId, + parentId: Schema.NullOr(TaskId), + title: TrimmedNonEmptyString, + description: Schema.NullOr(Schema.String), + status: TaskStatus, + priority: TaskPriority, + complexity: TaskComplexity, + provider: Schema.optional(ProviderKind), + threadId: Schema.NullOr(ThreadId), + dependsOn: Schema.Array(TaskId), + estimatedTokens: Schema.NullOr(NonNegativeInt), + order: NonNegativeInt, + createdAt: IsoDateTime, + updatedAt: IsoDateTime, + completedAt: Schema.NullOr(IsoDateTime), +}); +export type TaskNode = typeof TaskNode.Type; + +export const TaskTree = Schema.Struct({ + id: TaskTreeId, + projectId: ProjectId, + rootPrompt: TrimmedNonEmptyString, + 
tasks: Schema.Array(TaskNode), + status: TaskStatus, + createdAt: IsoDateTime, + updatedAt: IsoDateTime, +}); +export type TaskTree = typeof TaskTree.Type; + +export const TaskDecomposeInput = Schema.Struct({ + projectId: ProjectId, + prompt: TrimmedNonEmptyString, + provider: Schema.optional(ProviderKind), + maxDepth: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 3)), +}); +export type TaskDecomposeInput = typeof TaskDecomposeInput.Type; + +export const TaskUpdateStatusInput = Schema.Struct({ + treeId: TaskTreeId, + taskId: TaskId, + status: TaskStatus, +}); +export type TaskUpdateStatusInput = typeof TaskUpdateStatusInput.Type; + +export const TaskGetTreeInput = Schema.Struct({ + treeId: TaskTreeId, +}); +export type TaskGetTreeInput = typeof TaskGetTreeInput.Type; + +export const TaskListTreesInput = Schema.Struct({ + projectId: ProjectId, +}); +export type TaskListTreesInput = typeof TaskListTreesInput.Type; + +export const TaskListTreesResult = Schema.Struct({ + trees: Schema.Array(TaskTree), +}); +export type TaskListTreesResult = typeof TaskListTreesResult.Type; + +export const TaskExecuteInput = Schema.Struct({ + treeId: TaskTreeId, + taskId: Schema.optional(TaskId), +}); +export type TaskExecuteInput = typeof TaskExecuteInput.Type; + +export const TaskStreamEvent = Schema.Union([ + Schema.Struct({ type: Schema.Literal("task.tree.updated"), tree: TaskTree }), + Schema.Struct({ + type: Schema.Literal("task.node.updated"), + treeId: TaskTreeId, + node: TaskNode, + }), +]); +export type TaskStreamEvent = typeof TaskStreamEvent.Type; + +export class TaskDecompositionError extends Schema.TaggedErrorClass<TaskDecompositionError>()( + "TaskDecompositionError", + { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) {} diff --git a/packages/contracts/src/workflows.ts b/packages/contracts/src/workflows.ts new file mode 100644 index 0000000000..c0250f43f5 --- /dev/null +++ b/packages/contracts/src/workflows.ts @@ -0,0 +1,97 @@ +import { Schema } from "effect"; 
+import { IsoDateTime, NonNegativeInt, ProjectId, TrimmedNonEmptyString } from "./baseSchemas"; +import { ModelSelection, ProviderKind, RuntimeMode } from "./orchestration"; + +const makeEntityId = <Brand extends string>(brand: Brand) => + TrimmedNonEmptyString.pipe(Schema.brand(brand)); + +export const WorkflowTemplateId = makeEntityId("WorkflowTemplateId"); +export type WorkflowTemplateId = typeof WorkflowTemplateId.Type; +export const WorkflowStepId = makeEntityId("WorkflowStepId"); +export type WorkflowStepId = typeof WorkflowStepId.Type; + +export const WorkflowStepKind = Schema.Literals([ + "prompt", + "shell", + "git-commit", + "git-push", + "create-pr", + "run-tests", + "lint", + "wait-ci", + "conditional", +]); +export type WorkflowStepKind = typeof WorkflowStepKind.Type; + +export const WorkflowVariable = Schema.Struct({ + name: TrimmedNonEmptyString, + description: Schema.NullOr(Schema.String), + defaultValue: Schema.NullOr(Schema.String), + required: Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), +}); +export type WorkflowVariable = typeof WorkflowVariable.Type; + +export const WorkflowStep = Schema.Struct({ + id: WorkflowStepId, + name: TrimmedNonEmptyString, + kind: WorkflowStepKind, + provider: Schema.optional(ProviderKind), + modelSelection: Schema.optional(ModelSelection), + runtimeMode: Schema.optional(RuntimeMode), + prompt: Schema.NullOr(Schema.String), + command: Schema.NullOr(Schema.String), + condition: Schema.NullOr(Schema.String), + continueOnError: Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), + timeoutMs: NonNegativeInt.pipe(Schema.withDecodingDefault(() => 120_000)), + dependsOn: Schema.Array(WorkflowStepId), +}); +export type WorkflowStep = typeof WorkflowStep.Type; + +export const WorkflowTemplate = Schema.Struct({ + id: WorkflowTemplateId, + name: TrimmedNonEmptyString, + description: Schema.NullOr(Schema.String), + category: TrimmedNonEmptyString, + variables: Schema.Array(WorkflowVariable), + steps: 
Schema.Array(WorkflowStep), + isBuiltIn: Schema.Boolean.pipe(Schema.withDecodingDefault(() => false)), + createdAt: IsoDateTime, + updatedAt: IsoDateTime, +}); +export type WorkflowTemplate = typeof WorkflowTemplate.Type; + +export const WorkflowListInput = Schema.Struct({ + category: Schema.optional(TrimmedNonEmptyString), +}); +export type WorkflowListInput = typeof WorkflowListInput.Type; + +export const WorkflowListResult = Schema.Struct({ + templates: Schema.Array(WorkflowTemplate), +}); +export type WorkflowListResult = typeof WorkflowListResult.Type; + +export const WorkflowCreateInput = Schema.Struct({ + name: TrimmedNonEmptyString, + description: Schema.NullOr(Schema.String), + category: TrimmedNonEmptyString, + variables: Schema.Array(WorkflowVariable), + steps: Schema.Array(WorkflowStep), +}); +export type WorkflowCreateInput = typeof WorkflowCreateInput.Type; + +export const WorkflowDeleteInput = Schema.Struct({ + templateId: WorkflowTemplateId, +}); +export type WorkflowDeleteInput = typeof WorkflowDeleteInput.Type; + +export const WorkflowExecuteInput = Schema.Struct({ + templateId: WorkflowTemplateId, + projectId: ProjectId, + variables: Schema.Record(Schema.String, Schema.String), +}); +export type WorkflowExecuteInput = typeof WorkflowExecuteInput.Type; + +export class WorkflowError extends Schema.TaggedErrorClass<WorkflowError>()("WorkflowError", { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), +}) {}