diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 3c34229830..5fb3a00abb 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -176,6 +176,22 @@ All tests should pass against the local PostgreSQL database. | `pnpm stripe` | Start Stripe webhook forwarding to localhost | | `pnpm test:e2e` | Run Playwright end-to-end tests | +## Dev CLI + +The repo includes a Bun-based dev CLI that manages service dependencies and selectively starts what you need: + +```bash +pnpm kilo up # Start Next.js + Postgres + Redis + migrations +pnpm kilo up kiloclaw # Start KiloClaw + all its dependencies +pnpm kilo up cloud-agent # Start Cloud Agent + dependencies +pnpm kilo status # Check what's running +pnpm kilo env check # Validate all .dev.vars files +pnpm kilo down # Stop Docker infrastructure +pnpm kilo logs # List all available services +``` + +The CLI resolves service dependencies automatically — `pnpm kilo up kiloclaw` will start Postgres, Redis, run migrations, start Next.js, then start the KiloClaw worker. + ## Git Workflow - Direct commits to `main` are blocked by a pre-commit hook. Always work on a feature branch. 
diff --git a/dev/cli/bun.lock b/dev/cli/bun.lock new file mode 100644 index 0000000000..595d63a9ce --- /dev/null +++ b/dev/cli/bun.lock @@ -0,0 +1,19 @@ +{ + "lockfileVersion": 1, + "configVersion": 1, + "workspaces": { + "": { + "name": "@kilocode/dev-cli", + "devDependencies": { + "bun-types": "^1.3.10", + }, + }, + }, + "packages": { + "@types/node": ["@types/node@25.5.0", "", { "dependencies": { "undici-types": "~7.18.0" } }, "sha512-jp2P3tQMSxWugkCUKLRPVUpGaL5MVFwF8RDuSRztfwgN1wmqJeMSbKlnEtQqU8UrhTmzEmZdu2I6v2dpp7XIxw=="], + + "bun-types": ["bun-types@1.3.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-tcpfCCl6XWo6nCVnpcVrxQ+9AYN1iqMIzgrSKYMB/fjLtV2eyAVEg7AxQJuCq/26R6HpKWykQXuSOq/21RYcbg=="], + + "undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="], + } +} diff --git a/dev/cli/package.json b/dev/cli/package.json new file mode 100644 index 0000000000..766845b921 --- /dev/null +++ b/dev/cli/package.json @@ -0,0 +1,9 @@ +{ + "name": "@kilocode/dev-cli", + "version": "0.0.1", + "private": true, + "type": "module", + "devDependencies": { + "bun-types": "^1.3.10" + } +} diff --git a/dev/cli/src/commands/down.ts b/dev/cli/src/commands/down.ts new file mode 100644 index 0000000000..dacf74104b --- /dev/null +++ b/dev/cli/src/commands/down.ts @@ -0,0 +1,8 @@ +import * as docker from '../infra/docker'; +import * as ui from '../utils/ui'; + +export async function down(root: string) { + ui.header('Stopping services'); + await docker.stopAll(root); + ui.success('Docker services stopped'); +} diff --git a/dev/cli/src/commands/env.ts b/dev/cli/src/commands/env.ts new file mode 100644 index 0000000000..9aee5298ac --- /dev/null +++ b/dev/cli/src/commands/env.ts @@ -0,0 +1,63 @@ +import { services } from '../services/registry'; +import { parseEnvFile, findMissingVars } from '../utils/env'; +import * as ui from '../utils/ui'; +import { join } from 'path'; + +export async 
function envCheck(root: string) { + ui.header('Environment Variable Check'); + + const envLocalPath = join(root, '.env.local'); + const envLocalExists = await Bun.file(envLocalPath).exists(); + if (envLocalExists) { + ui.success('.env.local exists'); + } else { + ui.error('.env.local missing — run: vercel env pull'); + } + + const vercelProjectPath = join(root, '.vercel', 'project.json'); + const vercelLinked = await Bun.file(vercelProjectPath).exists(); + if (vercelLinked) { + ui.success('Vercel project linked'); + } else { + ui.warn('Vercel project not linked — run: vercel link --project kilocode-app'); + } + + let allGood = envLocalExists && vercelLinked; + + const servicesWithEnv = services.filter(s => s.envFile); + + for (const svc of servicesWithEnv) { + const examplePath = join(root, svc.dir, svc.envFile!); + const actualPath = join(root, svc.dir, '.dev.vars'); + + const exampleExists = await Bun.file(examplePath).exists(); + const actualExists = await Bun.file(actualPath).exists(); + + if (!actualExists) { + ui.warn(`${svc.name}: .dev.vars missing (copy from ${svc.envFile})`); + allGood = false; + continue; + } + + if (exampleExists) { + const exampleContent = await Bun.file(examplePath).text(); + const actualContent = await Bun.file(actualPath).text(); + const example = parseEnvFile(exampleContent); + const actual = parseEnvFile(actualContent); + const missing = findMissingVars(example, actual); + + if (missing.length > 0) { + ui.warn(`${svc.name}: placeholder values: ${missing.join(', ')}`); + allGood = false; + } else { + ui.success(`${svc.name}: .dev.vars OK`); + } + } + } + + if (allGood) { + console.log(`\n ${ui.green('All environment checks passed!')}\n`); + } else { + console.log(`\n ${ui.yellow('Some checks need attention (see above)')}\n`); + } +} diff --git a/dev/cli/src/commands/logs.ts b/dev/cli/src/commands/logs.ts new file mode 100644 index 0000000000..57009728dc --- /dev/null +++ b/dev/cli/src/commands/logs.ts @@ -0,0 +1,36 @@ +import { 
services } from '../services/registry'; +import * as ui from '../utils/ui'; + +export async function logs(args: string[], root: string) { + if (args.length === 0) { + ui.header('Available services'); + for (const svc of services) { + const portInfo = svc.port ? ` (port ${svc.port})` : ''; + console.log(` ${svc.name.padEnd(20)} ${ui.dim(svc.description)}${portInfo}`); + } + return; + } + + const name = args[0]; + const svc = services.find(s => s.name === name); + if (!svc) { + ui.error(`Unknown service: "${name}"`); + return; + } + + if (svc.type === 'infra' && svc.devCommand?.startsWith('docker compose')) { + const proc = Bun.spawn( + ['docker', 'compose', '-f', 'dev/docker-compose.yml', 'logs', '-f', svc.name], + { stdout: 'inherit', stderr: 'inherit', cwd: root } + ); + await proc.exited; + } else if (svc.type === 'infra') { + ui.warn( + `"${svc.name}" is not a Docker Compose service (runs: ${svc.devCommand ?? 'n/a'}).\n It does not produce persistent logs that can be tailed.` + ); + } else { + ui.warn( + `Log tailing for running dev servers is not yet supported.\n Start the service with 'pnpm kilo dev up ${name}' to see its output.` + ); + } +} diff --git a/dev/cli/src/commands/status.ts b/dev/cli/src/commands/status.ts new file mode 100644 index 0000000000..ec681e2a4e --- /dev/null +++ b/dev/cli/src/commands/status.ts @@ -0,0 +1,46 @@ +import { services } from '../services/registry'; +import * as docker from '../infra/docker'; +import * as ui from '../utils/ui'; + +export async function status(root: string) { + ui.header('Service Status'); + + const pgHealthy = await docker.isHealthy(root, 'postgres'); + const redisHealthy = await docker.isHealthy(root, 'redis'); + + console.log( + ` ${pgHealthy ? ui.green('●') : ui.red('●')} postgres ${pgHealthy ? 'running' : 'stopped'}` + ); + console.log( + ` ${redisHealthy ? ui.green('●') : ui.red('●')} redis ${redisHealthy ? 
'running' : 'stopped'}` + ); + + const portServices = services.filter(s => s.port && s.type !== 'infra'); + for (const svc of portServices) { + const listening = await isPortListening(svc.port!); + console.log( + ` ${listening ? ui.green('●') : ui.dim('○')} ${svc.name.padEnd(12)} ${listening ? `port ${svc.port}` : ui.dim('not running')}` + ); + } + + console.log(); +} + +async function isPortListening(port: number): Promise<boolean> { + try { + const socket = await Bun.connect({ + hostname: '127.0.0.1', + port, + socket: { + data() {}, + open(s) { + s.end(); + }, + error() {}, + }, + }); + return true; + } catch { + return false; + } +} diff --git a/dev/cli/src/commands/tunnel.ts b/dev/cli/src/commands/tunnel.ts new file mode 100644 index 0000000000..be691344d2 --- /dev/null +++ b/dev/cli/src/commands/tunnel.ts @@ -0,0 +1,29 @@ +import { startQuickTunnel, startNamedTunnel, updateDevVarsUrl } from '../infra/tunnel'; +import * as ui from '../utils/ui'; +import { join } from 'path'; + +export async function tunnel(args: string[], root: string) { + const nameIdx = args.indexOf('--name'); + const tunnelName = nameIdx !== -1 ?
args[nameIdx + 1] : undefined; + const port = 3000; + + if (tunnelName) { + ui.header(`Starting named tunnel: ${tunnelName}`); + startNamedTunnel(tunnelName); + } else { + ui.header('Starting quick tunnel'); + const result = await startQuickTunnel(port); + if (result.url) { + ui.success(`Tunnel URL: ${result.url}`); + const devVarsPath = join(root, 'kiloclaw', '.dev.vars'); + if (await Bun.file(devVarsPath).exists()) { + await updateDevVarsUrl(devVarsPath, result.url); + } + } else { + ui.warn('Could not capture tunnel URL within 30s'); + ui.warn('Check cloudflared output and manually update .dev.vars'); + } + } + + await new Promise(() => {}); +} diff --git a/dev/cli/src/commands/up.ts b/dev/cli/src/commands/up.ts new file mode 100644 index 0000000000..770a00e506 --- /dev/null +++ b/dev/cli/src/commands/up.ts @@ -0,0 +1,78 @@ +import { resolve } from '../services/resolver'; +import { getServiceNames, type ServiceDef } from '../services/registry'; +import * as docker from '../infra/docker'; +import { spawnService, run } from '../utils/process'; +import * as ui from '../utils/ui'; +import { join } from 'path'; + +export async function up(args: string[], root: string) { + const targets = args.length > 0 ? args : ['nextjs']; + + const validNames = getServiceNames(); + for (const name of targets) { + if (!validNames.includes(name)) { + ui.error(`Unknown service: "${name}"`); + console.log(`\nAvailable services: ${validNames.join(', ')}`); + process.exit(1); + } + } + + const plan = resolve(targets); + + ui.header('Starting services'); + console.log(` ${plan.map(s => s.name).join(' → ')}\n`); + + const infraServices = plan.filter(s => s.type === 'infra'); + const appServices = plan.filter(s => s.type !== 'infra'); + + for (const svc of infraServices) { + await startInfraService(svc, root); + } + + if (appServices.length === 0) { + ui.success('Infrastructure is ready. 
No app services to start.'); + return; + } + + ui.header('Starting dev servers'); + + for (const svc of appServices) { + if (!svc.devCommand) continue; + const cwd = join(root, svc.dir); + const portInfo = svc.port ? ` (port ${svc.port})` : ''; + console.log(` Starting ${ui.bold(svc.name)}${portInfo}...`); + spawnService({ name: svc.name, command: svc.devCommand, cwd }); + } + + console.log(`\n ${ui.dim('Press Ctrl+C to stop all services')}\n`); + await new Promise(() => {}); +} + +async function startInfraService(svc: ServiceDef, root: string) { + ui.header(`Starting ${svc.name}`); + + if (!svc.devCommand) return; + + const ok = await run({ + command: svc.devCommand, + cwd: join(root, svc.dir), + label: svc.devCommand, + }); + + if (!ok) { + ui.error(`Failed to start ${svc.name}`); + process.exit(1); + } + + if (svc.name === 'postgres' || svc.name === 'redis') { + console.log(` Waiting for ${svc.name} to be healthy...`); + const healthy = await docker.waitForHealthy(root, svc.name); + if (healthy) { + ui.success(`${svc.name} is ready`); + } else { + ui.warn(`${svc.name} health check timed out — continuing anyway`); + } + } else { + ui.success(`${svc.name} complete`); + } +} diff --git a/dev/cli/src/index.ts b/dev/cli/src/index.ts new file mode 100644 index 0000000000..9c76b3e431 --- /dev/null +++ b/dev/cli/src/index.ts @@ -0,0 +1,146 @@ +#!/usr/bin/env bun +import { resolve as resolvePath } from 'path'; +import { up } from './commands/up'; +import { down } from './commands/down'; +import { status } from './commands/status'; +import { envCheck } from './commands/env'; +import { tunnel } from './commands/tunnel'; +import { logs } from './commands/logs'; +import { getServiceNames } from './services/registry'; +import { getProject, getProjectNames, projects } from './projects/index'; +import * as ui from './utils/ui'; + +const ROOT = resolvePath(import.meta.dir, '..', '..', '..'); + +const args = process.argv.slice(2); + +// Support both `kilo dev up` and `kilo up` 
(skip "dev" if present) +let command = args[0]; +let commandArgs = args.slice(1); +if (command === 'dev') { + command = args[1]; + commandArgs = args.slice(2); +} + +async function main() { + // Check if command is a project name with a subcommand + // e.g. `kilo kiloclaw setup` or `kilo code-review up` + if (command) { + const project = getProject(command); + if (project) { + const subcommand = commandArgs[0]; + const subArgs = commandArgs.slice(1); + + if (subcommand && project.commands[subcommand]) { + await project.commands[subcommand].run(subArgs, ROOT); + return; + } + + // Unknown subcommand — show error + project help + if (subcommand) { + ui.error(`Unknown command "${subcommand}" for project "${command}"`); + console.log(); + } + + printProjectHelp(project); + return; + } + } + + switch (command) { + case 'up': + await up(commandArgs, ROOT); + break; + + case 'down': + await down(ROOT); + break; + + case 'status': + await status(ROOT); + break; + + case 'env': + await envCheck(ROOT); + break; + + case 'tunnel': + await tunnel(commandArgs, ROOT); + break; + + case 'logs': + case 'ls': + await logs(commandArgs, ROOT); + break; + + case 'help': + case '--help': + case '-h': + case undefined: + printHelp(); + break; + + default: + // Maybe they typed a service name directly? e.g. 
`kilo kiloclaw` + if (getServiceNames().includes(command!)) { + await up([command!, ...commandArgs], ROOT); + } else { + ui.error(`Unknown command: "${command}"`); + printHelp(); + process.exit(1); + } + } +} + +function printProjectHelp(project: import('./projects/types').ProjectDef) { + const cmds = Object.entries(project.commands); + console.log(` +${ui.bold(project.name)} — ${project.description} + +${ui.bold('Commands:')} +${cmds.map(([name, cmd]) => ` ${name.padEnd(20)} ${cmd.description}`).join('\n')} + +${ui.bold('Usage:')} + pnpm kilo ${project.name} [options] +`); +} + +function printHelp() { + const projectList = projects.map(p => ` ${p.name.padEnd(20)} ${p.description}`).join('\n'); + + console.log(` +${ui.bold('kilo dev')} — Local development CLI + +${ui.bold('Usage:')} + pnpm kilo [options] + +${ui.bold('Commands:')} + up [services...] Start services (default: nextjs + infra) + down Stop Docker infrastructure (Ctrl+C stops dev servers) + status Show status of all services + env check Validate environment variables + tunnel [--name N] Start a cloudflared tunnel + logs [service] Tail service logs (or list services) + +${ui.bold('Projects:')} +${projectList} + +${ui.bold('Examples:')} + pnpm kilo up Start Next.js + Postgres + Redis + pnpm kilo up kiloclaw Start KiloClaw + all its dependencies + pnpm kilo kiloclaw setup KiloClaw-specific setup (Fly token, secrets) + pnpm kilo kiloclaw push-dev Build + push controller Docker image + pnpm kilo code-review up Start code review dev environment + pnpm kilo app-builder up Start app builder tmux session + pnpm kilo status Check what's running + pnpm kilo env check Validate all .dev.vars files + +${ui.bold('Services:')} + ${getServiceNames().join(', ')} +`); +} + +main().catch(err => { + ui.error(err.message); + process.exit(1); +}); diff --git a/dev/cli/src/infra/docker.ts b/dev/cli/src/infra/docker.ts new file mode 100644 index 0000000000..c9a4f8380d --- /dev/null +++ b/dev/cli/src/infra/docker.ts @@ -0,0 +1,65 
@@ +import { run } from '../utils/process'; + +const COMPOSE_FILE = 'dev/docker-compose.yml'; + +export async function startService(root: string, service: string): Promise<boolean> { + return run({ + command: `docker compose -f ${COMPOSE_FILE} up -d ${service}`, + cwd: root, + label: `docker compose up -d ${service}`, + }); +} + +export async function stopAll(root: string): Promise<boolean> { + return run({ + command: `docker compose -f ${COMPOSE_FILE} down`, + cwd: root, + label: 'docker compose down', + }); +} + +export async function isHealthy(root: string, service: string): Promise<boolean> { + try { + if (service === 'postgres') { + const proc = Bun.spawn( + [ + 'docker', + 'compose', + '-f', + COMPOSE_FILE, + 'exec', + '-T', + 'postgres', + 'pg_isready', + '-U', + 'postgres', + ], + { stdout: 'pipe', stderr: 'pipe', cwd: root } + ); + return (await proc.exited) === 0; + } + if (service === 'redis') { + const proc = Bun.spawn( + ['docker', 'compose', '-f', COMPOSE_FILE, 'exec', '-T', 'redis', 'redis-cli', 'ping'], + { stdout: 'pipe', stderr: 'pipe', cwd: root } + ); + return (await proc.exited) === 0; + } + return false; + } catch { + return false; + } +} + +export async function waitForHealthy( + root: string, + service: string, + timeoutMs = 30_000 +): Promise<boolean> { + const start = Date.now(); + while (Date.now() - start < timeoutMs) { + if (await isHealthy(root, service)) return true; + await Bun.sleep(500); + } + return false; +} diff --git a/dev/cli/src/infra/migrations.ts b/dev/cli/src/infra/migrations.ts new file mode 100644 index 0000000000..6c8c36bea2 --- /dev/null +++ b/dev/cli/src/infra/migrations.ts @@ -0,0 +1,9 @@ +import { run } from '../utils/process'; + +export async function runMigrations(root: string): Promise<boolean> { + return run({ + command: 'pnpm drizzle migrate', + cwd: root, + label: 'drizzle migrate', + }); +} diff --git a/dev/cli/src/infra/tunnel.ts b/dev/cli/src/infra/tunnel.ts new file mode 100644 index 0000000000..67dd5b5ab0 --- /dev/null +++
b/dev/cli/src/infra/tunnel.ts @@ -0,0 +1,88 @@ +import * as ui from '../utils/ui'; +import { type Subprocess } from 'bun'; + +export interface TunnelResult { + process: Subprocess; + url?: string; +} + +export async function startQuickTunnel(port: number): Promise<TunnelResult> { + const command = `cloudflared tunnel --url http://localhost:${port}`; + const proc = Bun.spawn(['sh', '-c', command], { + stdout: 'pipe', + stderr: 'pipe', + env: process.env, + }); + + const url = await captureUrl(proc.stderr, 30_000); + return { process: proc, url: url ?? undefined }; +} + +export function startNamedTunnel(name: string): Subprocess { + return Bun.spawn(['sh', '-c', `cloudflared tunnel run ${name}`], { + stdout: 'inherit', + stderr: 'inherit', + env: process.env, + }); +} + +async function captureUrl( + stream: ReadableStream<Uint8Array> | null, + timeoutMs: number +): Promise<string | null> { + if (!stream) return null; + + const reader = stream.getReader(); + const decoder = new TextDecoder(); + const start = Date.now(); + let buffer = ''; + + while (Date.now() - start < timeoutMs) { + const result = await Promise.race([ + reader.read(), + Bun.sleep(timeoutMs - (Date.now() - start)).then(() => ({ + done: true as const, + value: undefined, + })), + ]); + + if (result.value) { + const text = decoder.decode(result.value, { stream: true }); + buffer += text; + process.stderr.write(text); + const match = buffer.match(/https:\/\/[a-z0-9-]+\.trycloudflare\.com/); + if (match) { + pipeRemainingStderr(reader); + return match[0]; + } + } + if (result.done) break; + } + + return null; +} + +async function pipeRemainingStderr(reader: ReadableStreamDefaultReader<Uint8Array>) { + const decoder = new TextDecoder(); + while (true) { + const { done, value } = await reader.read(); + if (done) break; + process.stderr.write(decoder.decode(value, { stream: true })); + } +} + +export async function updateDevVarsUrl(devVarsPath: string, tunnelUrl: string): Promise<void> { + const apiUrl = `${tunnelUrl}/api/gateway/`; + const file =
Bun.file(devVarsPath); + let content = await file.text(); + + const pattern = /^(#\s*)?KILOCODE_API_BASE_URL=.*/m; + if (pattern.test(content)) { + content = content.replace(pattern, `KILOCODE_API_BASE_URL=${apiUrl}`); + } else { + content += `\nKILOCODE_API_BASE_URL=${apiUrl}\n`; + } + + await Bun.write(devVarsPath, content); + ui.success(`Set KILOCODE_API_BASE_URL=${apiUrl}`); +} diff --git a/dev/cli/src/projects/app-builder.ts b/dev/cli/src/projects/app-builder.ts new file mode 100644 index 0000000000..47900cbe9b --- /dev/null +++ b/dev/cli/src/projects/app-builder.ts @@ -0,0 +1,197 @@ +import type { ProjectDef } from './types'; +import { run } from '../utils/process'; +import * as ui from '../utils/ui'; +import { join } from 'path'; + +const SESSION = 'app-builder-dev'; + +const SERVICES = [ + { + pane: 0, + title: 'db-proxy (8792)', + dir: 'cloudflare-db-proxy', + cmd: 'pnpm exec wrangler dev --inspector-port 9230', + url: 'http://localhost:8792', + }, + { + pane: 1, + title: 'session-ingest (8787)', + dir: 'cloudflare-session-ingest', + cmd: 'pnpm exec wrangler dev --inspector-port 9233', + url: 'http://localhost:8787', + }, + { + pane: 2, + title: 'cloud-agent (8788)', + dir: 'cloud-agent', + cmd: 'pnpm exec wrangler dev --inspector-port 9231', + url: 'http://localhost:8788', + }, + { + pane: 3, + title: 'images-mcp (8796)', + dir: 'cloudflare-images-mcp', + cmd: 'pnpm exec wrangler dev --env dev --inspector-port 9236', + url: 'http://localhost:8796', + }, + { + pane: 4, + title: 'webhook-ingest (8793)', + dir: 'cloudflare-webhook-agent-ingest', + cmd: 'pnpm exec wrangler dev --env dev --inspector-port 9237', + url: 'http://localhost:8793', + }, + { + pane: 5, + title: 'git-token (8795)', + dir: 'cloudflare-git-token-service', + cmd: 'pnpm exec wrangler dev --inspector-port 9235', + url: 'http://localhost:8795', + }, + { + pane: 6, + title: 'app-builder (8790)', + dir: 'cloudflare-app-builder', + cmd: 'pnpm exec wrangler dev --inspector-port 9232', + url: 
'http://localhost:8790', + }, + { + pane: 7, + title: 'ngrok -> 8790', + dir: '.', + cmd: 'ngrok http 8790', + url: 'forwarding to :8790', + }, + { + pane: 8, + title: 'cloud-agent-next (8794)', + dir: 'cloud-agent-next', + cmd: 'pnpm run dev', + url: 'http://localhost:8794', + }, +] as const; + +function tmux(args: string[]): boolean { + const proc = Bun.spawnSync(['tmux', ...args]); + return proc.exitCode === 0; +} + +function sessionExists(): boolean { + return Bun.spawnSync(['tmux', 'has-session', '-t', SESSION]).exitCode === 0; +} + +async function upCommand(args: string[], root: string): Promise<void> { + const restart = args.includes('--restart') || args.includes('-r'); + + // Check dependencies + const hasTmux = await run({ command: 'command -v tmux', cwd: root, label: 'check tmux' }); + if (!hasTmux) { + ui.error('tmux is required but not installed. Install it with: brew install tmux'); + process.exit(1); + } + + const hasNgrok = await run({ command: 'command -v ngrok', cwd: root, label: 'check ngrok' }); + if (!hasNgrok) { + ui.error('ngrok is required but not installed.
Install it from: https://ngrok.com/download'); + process.exit(1); + } + + // Handle existing session + if (sessionExists()) { + if (restart) { + ui.warn('Restarting existing session...'); + tmux(['kill-session', '-t', SESSION]); + } else { + console.log( + `\n${ui.bold('Attaching to existing session...')} ${ui.dim('(use --restart to start fresh)')}\n` + ); + const proc = Bun.spawn(['tmux', 'attach', '-t', SESSION], { stdio: 'inherit' }); + await proc.exited; + return; + } + } + + ui.header('Starting App Builder Dev Environment'); + + // Create the tmux session + tmux(['new-session', '-d', '-s', SESSION, '-n', 'services', '-c', root]); + + // Build up 9 panes by splitting: + // Split once vertically (creates pane 1 below pane 0) + tmux(['split-window', '-v', '-t', `${SESSION}:services`, '-c', root]); + // Split pane 0 horizontally four times (top row: panes 0-4) + tmux(['split-window', '-h', '-t', `${SESSION}:services.0`, '-c', root]); + tmux(['split-window', '-h', '-t', `${SESSION}:services.0`, '-c', root]); + tmux(['split-window', '-h', '-t', `${SESSION}:services.0`, '-c', root]); + tmux(['split-window', '-h', '-t', `${SESSION}:services.0`, '-c', root]); + // Split pane 5 horizontally three times (bottom row: panes 5-8) + tmux(['split-window', '-h', '-t', `${SESSION}:services.5`, '-c', root]); + tmux(['split-window', '-h', '-t', `${SESSION}:services.5`, '-c', root]); + tmux(['split-window', '-h', '-t', `${SESSION}:services.5`, '-c', root]); + + // Arrange in tiled layout + tmux(['select-layout', '-t', `${SESSION}:services`, 'tiled']); + + // Configure pane borders + tmux(['set-option', '-t', SESSION, 'pane-border-status', 'top']); + tmux(['set-option', '-t', SESSION, 'pane-border-format', ' #{pane_index}: #{pane_title} ']); + tmux(['set-option', '-t', SESSION, 'allow-set-title', 'off']); + + // Set pane titles and send commands + for (const svc of SERVICES) { + const paneTarget = `${SESSION}:services.${svc.pane}`; + tmux(['select-pane', '-t', paneTarget, '-T', 
svc.title]); + + const dir = svc.dir === '.' ? root : join(root, svc.dir); + tmux(['send-keys', '-t', paneTarget, `cd "${dir}" && ${svc.cmd}`, 'C-m']); + } + + // Select the ngrok pane by default + tmux(['select-pane', '-t', `${SESSION}:services.7`]); + + // Print summary + console.log(` +${ui.bold(ui.cyan('App Builder Dev Environment Started!'))} + +${ui.bold('Services:')} +`); + ui.table([ + ['db-proxy', 'http://localhost:8792'], + ['session-ingest', 'http://localhost:8787'], + ['cloud-agent', 'http://localhost:8788'], + ['cloud-agent-next', 'http://localhost:8794'], + ['git-token-service', 'http://localhost:8795'], + ['app-builder', 'http://localhost:8790'], + ['images-mcp', 'http://localhost:8796'], + ['webhook-agent-ingest', 'http://localhost:8793'], + ['ngrok', 'forwarding to :8790'], + ]); + + console.log(` +${ui.bold('tmux Navigation:')} + ${ui.dim('Switch panes: Ctrl+b then arrow keys')} + ${ui.dim('Scroll mode: Ctrl+b then [ (use arrows/PgUp/PgDn, q=exit)')} + ${ui.dim('Detach: Ctrl+b then d')} + ${ui.dim('Zoom pane: Ctrl+b then z (toggle fullscreen pane)')} + +${ui.bold('Session Commands:')} + ${ui.dim(`Attach: tmux attach -t ${SESSION}`)} + ${ui.dim(`Kill: tmux kill-session -t ${SESSION}`)} +`); + + // Attach to the session + const proc = Bun.spawn(['tmux', 'attach', '-t', SESSION], { stdio: 'inherit' }); + await proc.exited; +} + +export const appBuilder: ProjectDef = { + name: 'app-builder', + description: 'App builder Cloudflare Workers dev environment (tmux session)', + commands: { + up: { + description: + 'Start all app-builder services in a tmux session (--restart to force fresh start)', + run: upCommand, + }, + }, +}; diff --git a/dev/cli/src/projects/auto-fix.ts b/dev/cli/src/projects/auto-fix.ts new file mode 100644 index 0000000000..8e86e18461 --- /dev/null +++ b/dev/cli/src/projects/auto-fix.ts @@ -0,0 +1,193 @@ +import type { ProjectDef } from './types'; +import { spawnService, run } from '../utils/process'; +import * as ui from 
'../utils/ui'; +import { join } from 'path'; +import { createHmac, randomUUID } from 'crypto'; + +const GENERIC_BODY = JSON.stringify( + { + action: 'created', + comment: { + body: 'PLACEHOLDER: Replace with real comment', + }, + pull_request: { + number: 123, + title: 'PLACEHOLDER: Replace with real PR title', + body: 'PLACEHOLDER: Replace with real PR body', + state: 'open', + draft: false, + html_url: 'https://github.com/OWNER/REPO/pull/123', + user: { + id: 1, + login: 'octocat', + avatar_url: 'https://github.com/images/error/octocat_happy.gif', + }, + head: { + sha: '1111111111111111111111111111111111111111', + ref: 'feature/placeholder', + repo: { full_name: 'OWNER/REPO' }, + }, + base: { + sha: '2222222222222222222222222222222222222222', + ref: 'main', + }, + }, + repository: { + id: 1, + name: 'REPO', + full_name: 'OWNER/REPO', + private: false, + owner: { login: 'OWNER' }, + }, + installation: { id: 12345678 }, + sender: { login: 'octocat' }, + }, + null, + 2 +); + +async function upCommand(args: string[], root: string): Promise<void> { + const skipRoot = args.includes('--no-root'); + + const logDir = join(root, 'dev', '.dev-logs', 'auto-fix'); + await Bun.write(join(logDir, '.gitkeep'), ''); + + ui.header('Kilo Cloud Dev Services — Auto Fix'); + console.log(` ${ui.dim(`Logs → ${logDir}/`)}\n`); + + if (!skipRoot) { + console.log(` Starting ${ui.bold('root')} (Next.js, port 3000)...`); + spawnService({ + name: 'root', + command: 'pnpm dev', + cwd: root, + }); + } + + console.log(` Starting ${ui.bold('session')} (Session Worker, inspector 9230)...`); + spawnService({ + name: 'session', + command: 'pnpm exec wrangler dev --inspector-port 9230', + cwd: join(root, 'cloudflare-session-ingest'), + }); + + console.log(` Starting ${ui.bold('auto-fix')} (Auto Fix Worker, inspector 9231)...`); + spawnService({ + name: 'auto-fix', + command: 'pnpm exec wrangler dev --inspector-port 9231', + cwd: join(root, 'cloudflare-auto-fix-infra'), + }); + + console.log(` Starting
${ui.bold('agent-next')} (Agent Next Worker, inspector 9232)...`); + const agentNextDir = join(root, 'cloud-agent-next'); + const buildOk = await run({ + command: 'pnpm run build:wrapper', + cwd: agentNextDir, + label: 'agent-next: build:wrapper', + }); + if (!buildOk) { + ui.error('agent-next build:wrapper failed — aborting'); + process.exit(1); + } + spawnService({ + name: 'agent-next', + command: 'pnpm exec wrangler dev --env dev --inspector-port 9232', + cwd: agentNextDir, + }); + + console.log(`\n ${ui.dim('Press Ctrl+C to stop all services')}\n`); + await new Promise(() => {}); +} + +async function testWebhookCommand(args: string[], _root: string): Promise<void> { + const WEBHOOK_URL = process.env.WEBHOOK_URL ?? 'http://127.0.0.1:3000/api/webhooks/github'; + const WEBHOOK_SECRET = process.env.WEBHOOK_SECRET ?? 'dausigdb781g287d9asgd9721dsa'; + const DEFAULT_EVENT_TYPE = 'pull_request_review_comment'; + + const payloadFile = args[0]; + let rawBody: string; + let payloadSource: string; + + if (payloadFile === '-') { + const chunks: Uint8Array[] = []; + for await (const chunk of Bun.stdin.stream()) { + chunks.push(chunk); + } + rawBody = Buffer.concat(chunks).toString('utf-8'); + payloadSource = 'stdin'; + } else if (payloadFile) { + rawBody = await Bun.file(payloadFile).text(); + payloadSource = payloadFile; + } else { + rawBody = GENERIC_BODY; + payloadSource = 'embedded generic payload'; + } + + // Detect event from envelope payload + let parsed: Record<string, unknown>; + try { + parsed = JSON.parse(rawBody); + } catch { + parsed = {}; + } + + let finalEventType: string; + if (process.env.EVENT_TYPE) { + finalEventType = process.env.EVENT_TYPE; + } else if (parsed && typeof parsed.event === 'string') { + finalEventType = parsed.event; + } else { + finalEventType = DEFAULT_EVENT_TYPE; + } + + // Unwrap envelope payloads like {event: "...", payload: {...}} + const body = + parsed && typeof parsed.payload === 'object' && parsed.payload !== null + ?
JSON.stringify(parsed.payload) + : rawBody; + + const signature = 'sha256=' + createHmac('sha256', WEBHOOK_SECRET).update(body).digest('hex'); + + const deliveryId = randomUUID(); + + console.log(`Delivery ID: ${deliveryId}`); + console.log(`Event: ${finalEventType}`); + console.log(`URL: ${WEBHOOK_URL}`); + console.log(`Payload source: ${payloadSource}`); + console.log(`Signature: ${signature}`); + console.log(); + console.log('Sending webhook...'); + console.log(); + + const response = await fetch(WEBHOOK_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-github-event': finalEventType, + 'x-github-delivery': deliveryId, + 'x-hub-signature-256': signature, + }, + body, + }); + + const responseText = await response.text(); + console.log(responseText); + console.log(`HTTP Status: ${response.status}`); + console.log(); + console.log('Done.'); +} + +export const autoFix: ProjectDef = { + name: 'auto-fix', + description: 'Auto-fix dev environment (workers + Next.js)', + commands: { + up: { + description: 'Start auto-fix dev environment (Next.js + session/auto-fix/agent-next workers)', + run: upCommand, + }, + 'test-webhook': { + description: 'Send a test GitHub pull_request_review_comment webhook to the local dev server', + run: testWebhookCommand, + }, + }, +}; diff --git a/dev/cli/src/projects/code-review.ts b/dev/cli/src/projects/code-review.ts new file mode 100644 index 0000000000..5212e66b40 --- /dev/null +++ b/dev/cli/src/projects/code-review.ts @@ -0,0 +1,192 @@ +import type { ProjectDef } from './types'; +import { spawnService, run } from '../utils/process'; +import * as ui from '../utils/ui'; +import { join } from 'path'; +import { createHmac, randomUUID } from 'crypto'; + +const GENERIC_BODY = JSON.stringify( + { + action: 'opened', + number: 123, + pull_request: { + number: 123, + title: 'PLACEHOLDER: Replace with real PR title', + body: 'PLACEHOLDER: Replace with real PR body', + state: 'open', + draft: false, + html_url: 
/**
 * Starts the code-review dev environment: the root Next.js app (unless
 * --no-root is passed) plus the session, review, and agent-next workers,
 * each on its own inspector port. Blocks until Ctrl+C.
 */
async function upCommand(args: string[], root: string): Promise<void> {
  const skipRoot = args.includes('--no-root');

  // Ensure the log directory exists by writing a .gitkeep into it.
  const logDir = join(root, 'dev', '.dev-logs', 'review');
  await Bun.write(join(logDir, '.gitkeep'), '');

  ui.header('Kilo Cloud Dev Services — Code Review');
  console.log(` ${ui.dim(`Logs → ${logDir}/`)}\n`);

  if (!skipRoot) {
    console.log(` Starting ${ui.bold('root')} (Next.js, port 3000)...`);
    spawnService({
      name: 'root',
      command: 'pnpm dev',
      cwd: root,
    });
  }

  console.log(` Starting ${ui.bold('session')} (Session Worker, inspector 9230)...`);
  spawnService({
    name: 'session',
    command: 'pnpm exec wrangler dev --inspector-port 9230',
    cwd: join(root, 'cloudflare-session-ingest'),
  });

  console.log(` Starting ${ui.bold('review')} (Review Worker, inspector 9231)...`);
  spawnService({
    name: 'review',
    command: 'pnpm exec wrangler dev --inspector-port 9231',
    cwd: join(root, 'cloudflare-code-review-infra'),
  });

  console.log(` Starting ${ui.bold('agent-next')} (Agent Next Worker, inspector 9232)...`);
  const agentNextDir = join(root, 'cloud-agent-next');
  // agent-next needs its wrapper built before wrangler can serve it;
  // a failed build aborts the whole `up` rather than starting a broken worker.
  const buildOk = await run({
    command: 'pnpm run build:wrapper',
    cwd: agentNextDir,
    label: 'agent-next: build:wrapper',
  });
  if (!buildOk) {
    ui.error('agent-next build:wrapper failed — aborting');
    process.exit(1);
  }
  spawnService({
    name: 'agent-next',
    command: 'pnpm exec wrangler dev --env dev --inspector-port 9232',
    cwd: agentNextDir,
  });

  console.log(`\n ${ui.dim('Press Ctrl+C to stop all services')}\n`);
  // Never-resolving promise keeps the CLI alive; the SIGINT/SIGTERM
  // handler installed by spawnService performs shutdown.
  await new Promise(() => {});
}
/**
 * Project definition for the code-review dev environment.
 * Exposes `up` (start all services) and `test-webhook` (send a signed
 * sample GitHub webhook to the locally running server).
 */
export const codeReview: ProjectDef = {
  name: 'code-review',
  description: 'Code review dev environment (workers + Next.js)',
  commands: {
    up: {
      description:
        'Start code review dev environment (Next.js + session/review/agent-next workers)',
      run: upCommand,
    },
    'test-webhook': {
      description: 'Send a test GitHub pull_request webhook to the local dev server',
      run: testWebhookCommand,
    },
  },
};
p.name); +} diff --git a/dev/cli/src/projects/kiloclaw.ts b/dev/cli/src/projects/kiloclaw.ts new file mode 100644 index 0000000000..50498fd3b8 --- /dev/null +++ b/dev/cli/src/projects/kiloclaw.ts @@ -0,0 +1,284 @@ +import type { ProjectDef } from './types'; +import { run } from '../utils/process'; +import * as ui from '../utils/ui'; +import { parseEnvFile } from '../utils/env'; +import { join } from 'path'; + +function setDevVar(content: string, key: string, value: string): string { + const pattern = new RegExp(`^${key}=.*`, 'm'); + if (pattern.test(content)) { + return content.replace(pattern, `${key}=${value}`); + } + const sep = content.endsWith('\n') ? '' : '\n'; + return content + `${sep}${key}=${value}\n`; +} + +export const kiloclaw: ProjectDef = { + name: 'kiloclaw', + description: 'KiloClaw controller and worker services', + commands: { + setup: { + description: 'Initialize KiloClaw dev environment (secrets, Vercel env, Fly token)', + async run(_args: string[], root: string): Promise { + const kiloclawDir = join(root, 'kiloclaw'); + const devVarsPath = join(kiloclawDir, '.dev.vars'); + const devVarsExamplePath = join(kiloclawDir, '.dev.vars.example'); + + // 1. Create .dev.vars from example if it doesn't exist + const devVarsFile = Bun.file(devVarsPath); + if (!(await devVarsFile.exists())) { + ui.header('Creating .dev.vars from .dev.vars.example...'); + const example = await Bun.file(devVarsExamplePath).text(); + await Bun.write(devVarsPath, example); + ui.success('Created .dev.vars'); + } + + // 2. 
Check AGENT_ENV_VARS_PRIVATE_KEY + ui.header('Checking AGENT_ENV_VARS_PRIVATE_KEY...'); + let devVarsContent = await Bun.file(devVarsPath).text(); + const devVars = parseEnvFile(devVarsContent); + const agentKey = devVars['AGENT_ENV_VARS_PRIVATE_KEY']; + if (!agentKey || agentKey === '...') { + ui.error('AGENT_ENV_VARS_PRIVATE_KEY is not configured in .dev.vars.'); + ui.error('Get the dev version from 1Password (engineering vault)'); + ui.error(`Set it in ${devVarsPath}`); + process.exit(1); + } + ui.success('AGENT_ENV_VARS_PRIVATE_KEY is set'); + + // 3. Check Vercel link + ui.header('Checking Vercel link...'); + const vercelProjectJson = Bun.file(join(root, '.vercel', 'project.json')); + if (!(await vercelProjectJson.exists())) { + ui.error('Vercel project not linked.'); + ui.error(`Run 'vercel link' in ${root} first.`); + process.exit(1); + } + ui.success('Vercel project linked'); + + // 4. Pull Vercel env + ui.header('Pulling development environment from Vercel...'); + const vercelOk = await run({ + command: 'vercel env pull --environment=development', + cwd: root, + label: 'vercel env pull --environment=development', + }); + if (!vercelOk) { + ui.error('Failed to pull Vercel env. Is vercel CLI installed and logged in?'); + process.exit(1); + } + + // 5. 
Sync secrets from .env.local → .dev.vars + const envLocalPath = join(root, '.env.local'); + const envLocalFile = Bun.file(envLocalPath); + if (await envLocalFile.exists()) { + ui.header('Syncing secrets from .env.local into .dev.vars...'); + const envLocalContent = await envLocalFile.text(); + const envLocal = parseEnvFile(envLocalContent); + + // Reload devVarsContent after Vercel pull (it may have changed the file) + devVarsContent = await Bun.file(devVarsPath).text(); + + const nextauthSecret = envLocal['NEXTAUTH_SECRET']; + if (nextauthSecret) { + devVarsContent = setDevVar(devVarsContent, 'NEXTAUTH_SECRET', nextauthSecret); + ui.success('Synced NEXTAUTH_SECRET'); + } + + const internalApiSecret = envLocal['KILOCLAW_INTERNAL_API_SECRET']; + if (internalApiSecret) { + devVarsContent = setDevVar(devVarsContent, 'INTERNAL_API_SECRET', internalApiSecret); + ui.success('Synced KILOCLAW_INTERNAL_API_SECRET → INTERNAL_API_SECRET'); + } + + await Bun.write(devVarsPath, devVarsContent); + } + + // 6. Validate/refresh Fly API token + ui.header('Validating Fly API token...'); + + // Reload after any writes above + devVarsContent = await Bun.file(devVarsPath).text(); + const devVarsCurrent = parseEnvFile(devVarsContent); + + const flyOrg = devVarsCurrent['FLY_ORG_SLUG'] || 'kilo-dev'; + let flyToken = devVarsCurrent['FLY_API_TOKEN'] || ''; + + const generateFlyToken = async (): Promise => { + ui.header(`Generating new Fly API token for org '${flyOrg}'...`); + const proc = Bun.spawn(['fly', 'tokens', 'create', 'org', flyOrg], { + cwd: root, + stdout: 'pipe', + stderr: 'pipe', + }); + const code = await proc.exited; + const output = await new Response(proc.stdout).text(); + const errOutput = await new Response(proc.stderr).text(); + if (code !== 0 || !output.trim()) { + ui.error("Failed to create Fly token. Are you logged in? 
Try 'fly auth login'."); + if (errOutput) ui.error(errOutput.trim()); + process.exit(1); + } + const token = output.trim(); + devVarsContent = setDevVar(devVarsContent, 'FLY_API_TOKEN', token); + await Bun.write(devVarsPath, devVarsContent); + ui.success('Token saved to .dev.vars.'); + return token; + }; + + if (!flyToken || flyToken === 'fo1_...') { + flyToken = await generateFlyToken(); + } + + // Validate token + const validateToken = async (token: string): Promise => { + const proc = Bun.spawn( + [ + 'curl', + '-s', + '-o', + '/dev/null', + '-w', + '%{http_code}', + '-H', + `Authorization: Bearer ${token}`, + `https://api.machines.dev/v1/apps?org_slug=${flyOrg}&limit=1`, + ], + { cwd: root, stdout: 'pipe', stderr: 'pipe' } + ); + await proc.exited; + const status = (await new Response(proc.stdout).text()).trim(); + return status === '200'; + }; + + let tokenValid = await validateToken(flyToken); + if (!tokenValid) { + ui.warn(`Token is invalid or expired. Refreshing...`); + flyToken = await generateFlyToken(); + tokenValid = await validateToken(flyToken); + if (!tokenValid) { + ui.error("New token still failing. Check 'fly auth login' and org access."); + process.exit(1); + } + } + + ui.success('Fly API token is valid.'); + console.log(''); + ui.success('KiloClaw dev environment is ready!'); + }, + }, + + 'push-dev': { + description: 'Build and push controller Docker image to Fly registry', + async run(args: string[], root: string): Promise { + const kiloclawDir = join(root, 'kiloclaw'); + const devVarsPath = join(kiloclawDir, '.dev.vars'); + + // 1. Authenticate with Fly registry + ui.header('Authenticating with Fly registry...'); + const authOk = await run({ + command: 'fly auth docker', + cwd: root, + label: 'fly auth docker', + }); + if (!authOk) { + ui.error('Failed to authenticate with Fly registry.'); + process.exit(1); + } + + // 2. 
Read config from .dev.vars + const devVarsFile = Bun.file(devVarsPath); + if (!(await devVarsFile.exists())) { + ui.error(".dev.vars not found. Run 'pnpm kilo kiloclaw setup' first."); + process.exit(1); + } + let devVarsContent = await devVarsFile.text(); + const devVars = parseEnvFile(devVarsContent); + + const appName = devVars['FLY_APP_NAME'] || 'kiloclaw-dev'; + + // 3. Parse --local flag + const useLocal = args.includes('--local'); + + // 4. Select Dockerfile + let dockerfile: string; + if (useLocal) { + dockerfile = join(kiloclawDir, 'Dockerfile.local'); + // Validate tarball exists + const proc = Bun.spawn( + ['sh', '-c', `ls "${kiloclawDir}"/openclaw-build/openclaw-*.tgz 2>/dev/null`], + { cwd: root, stdout: 'pipe', stderr: 'pipe' } + ); + await proc.exited; + const found = (await new Response(proc.stdout).text()).trim(); + if (!found) { + ui.error('No openclaw-*.tgz found in openclaw-build/.'); + ui.error('Build your fork first:'); + ui.error(' cd /path/to/openclaw && pnpm build && npm pack'); + ui.error(` cp openclaw-*.tgz ${kiloclawDir}/openclaw-build/`); + process.exit(1); + } + ui.success('Using Dockerfile.local (local OpenClaw tarball)'); + } else { + dockerfile = join(kiloclawDir, 'Dockerfile'); + } + + // 5. Generate timestamped tag + const timestamp = (Date.now() / 1000) | 0; + const tag = `dev-${timestamp}`; + const image = `registry.fly.io/${appName}:${tag}`; + + // Get git SHA + const gitProc = Bun.spawn(['git', '-C', kiloclawDir, 'rev-parse', 'HEAD'], { + stdout: 'pipe', + stderr: 'pipe', + }); + await gitProc.exited; + const gitSha = (await new Response(gitProc.stdout).text()).trim() || 'unknown'; + + ui.header(`Building + pushing ${image} (linux/amd64)...`); + console.log(` Controller commit: ${gitSha}`); + + // 6. 
/** A single named command a project exposes to the dev CLI. */
export interface ProjectCommand {
  // One-line help text shown in CLI listings.
  description: string;
  // Receives the remaining CLI args and the monorepo root path.
  run: (args: string[], root: string) => Promise<void>;
}

/** A project: a named group of related dev commands (e.g. kiloclaw, code-review). */
export interface ProjectDef {
  name: string;
  description: string;
  // Keyed by command name as typed on the CLI (e.g. 'up', 'test-webhook').
  commands: Record<string, ProjectCommand>;
}
name: 'postgres', + type: 'infra', + dir: '.', + devCommand: 'docker compose -f dev/docker-compose.yml up -d postgres', + deps: [], + description: 'PostgreSQL 18 + pgvector', + }, + { + name: 'redis', + type: 'infra', + dir: '.', + devCommand: 'docker compose -f dev/docker-compose.yml up -d redis', + deps: [], + description: 'Redis 7', + }, + { + name: 'migrations', + type: 'infra', + dir: '.', + devCommand: 'pnpm drizzle migrate', + deps: ['postgres'], + description: 'Drizzle database migrations', + }, + + // --- Core App --- + { + name: 'nextjs', + type: 'app', + dir: '.', + port: 3000, + devCommand: 'pnpm dev', + deps: ['postgres', 'redis', 'migrations'], + description: 'Next.js dashboard + API (port 3000)', + }, + + // --- Workers --- + { + name: 'cloud-agent', + type: 'worker', + dir: 'cloud-agent', + port: 8788, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'CLI agent orchestration (Durable Objects + Containers)', + }, + { + name: 'cloud-agent-next', + type: 'worker', + dir: 'cloud-agent-next', + port: 8794, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'Next-gen CLI agent orchestration', + }, + { + name: 'session-ingest', + type: 'worker', + dir: 'cloudflare-session-ingest', + port: 8787, + devCommand: 'pnpm dev', + deps: ['nextjs'], + description: 'Session data ingestion', + }, + { + name: 'code-review', + type: 'worker', + dir: 'cloudflare-code-review-infra', + port: 8789, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'Automated code reviews', + }, + { + name: 'app-builder', + type: 'worker', + dir: 'cloudflare-app-builder', + port: 8790, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'App Builder sandbox', + }, + { + name: 'auto-triage', + type: 'worker', + dir: 'cloudflare-auto-triage-infra', + port: 8791, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: 
'.dev.vars.example', + description: 'Auto-triage for security findings', + }, + { + name: 'auto-fix', + type: 'worker', + dir: 'cloudflare-auto-fix-infra', + port: 8792, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'Auto-fix for security findings', + }, + { + name: 'webhook-agent', + type: 'worker', + dir: 'cloudflare-webhook-agent-ingest', + port: 8793, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'Incoming webhook processing', + }, + { + name: 'kiloclaw', + type: 'worker', + dir: 'kiloclaw', + port: 8795, + devCommand: 'pnpm dev', + deps: ['nextjs'], + envFile: '.dev.vars.example', + description: 'OpenClaw AI assistant (proxies to Fly.io)', + }, + { + name: 'gastown', + type: 'worker', + dir: 'cloudflare-gastown', + devCommand: 'pnpm dev', + deps: ['nextjs'], + description: 'AI agent orchestration via Durable Objects', + }, + { + name: 'db-proxy', + type: 'worker', + dir: 'cloudflare-db-proxy', + port: 8792, + devCommand: 'pnpm dev', + deps: ['postgres'], + envFile: '.dev.vars.example', + description: 'Database proxy service', + }, + { + name: 'git-token', + type: 'worker', + dir: 'cloudflare-git-token-service', + port: 8795, + devCommand: 'pnpm dev', + deps: [], + envFile: '.dev.vars.example', + description: 'Git token management', + }, + { + name: 'o11y', + type: 'worker', + dir: 'cloudflare-o11y', + devCommand: 'pnpm dev', + deps: [], + description: 'Observability / analytics', + }, + { + name: 'images-mcp', + type: 'worker', + dir: 'cloudflare-images-mcp', + port: 8796, + devCommand: 'pnpm dev', + deps: [], + envFile: '.dev.vars.example', + description: 'MCP for image handling', + }, + { + name: 'security-sync', + type: 'worker', + dir: 'cloudflare-security-sync', + devCommand: 'pnpm dev', + deps: [], + description: 'Security synchronization', + }, + { + name: 'security-analysis', + type: 'worker', + dir: 'cloudflare-security-auto-analysis', + port: 8797, + 
devCommand: 'pnpm dev', + deps: [], + description: 'Security auto-analysis', + }, + { + name: 'ai-attribution', + type: 'worker', + dir: 'cloudflare-ai-attribution', + devCommand: 'pnpm dev', + deps: [], + description: 'AI model attribution', + }, + { + name: 'gmail-push', + type: 'worker', + dir: 'cloudflare-gmail-push', + devCommand: 'pnpm dev', + deps: ['nextjs'], + description: 'Gmail push notifications', + }, +]; + +export function getService(name: string): ServiceDef | undefined { + return services.find(s => s.name === name); +} + +export function getServiceNames(): string[] { + return services.map(s => s.name); +} diff --git a/dev/cli/src/services/resolver.ts b/dev/cli/src/services/resolver.ts new file mode 100644 index 0000000000..96afb30bde --- /dev/null +++ b/dev/cli/src/services/resolver.ts @@ -0,0 +1,35 @@ +import { getService, type ServiceDef } from './registry'; + +/** + * Given target service names, returns the full set of services needed + * (including transitive dependencies) in topological order (deps first). 
+ */ +export function resolve(targets: string[]): ServiceDef[] { + if (targets.length === 0) return []; + + const visited = new Set(); + const visiting = new Set(); // cycle detection + const order: ServiceDef[] = []; + + function visit(name: string) { + if (visited.has(name)) return; + if (visiting.has(name)) { + throw new Error(`Circular dependency detected: ${name}`); + } + const svc = getService(name); + if (!svc) throw new Error(`Unknown service: "${name}"`); + visiting.add(name); + for (const dep of svc.deps) { + visit(dep); + } + visiting.delete(name); + visited.add(name); + order.push(svc); + } + + for (const target of targets) { + visit(target); + } + + return order; +} diff --git a/dev/cli/src/utils/env.ts b/dev/cli/src/utils/env.ts new file mode 100644 index 0000000000..9b2f0bffa5 --- /dev/null +++ b/dev/cli/src/utils/env.ts @@ -0,0 +1,41 @@ +export function parseEnvFile(content: string): Record { + const result: Record = {}; + for (const line of content.split('\n')) { + const trimmed = line.trim(); + if (!trimmed || trimmed.startsWith('#')) continue; + const eqIdx = trimmed.indexOf('='); + if (eqIdx === -1) continue; + const key = trimmed.slice(0, eqIdx).trim(); + let value = trimmed.slice(eqIdx + 1).trim(); + if ( + (value.startsWith('"') && value.endsWith('"')) || + (value.startsWith("'") && value.endsWith("'")) + ) { + value = value.slice(1, -1); + } + result[key] = value; + } + return result; +} + +const PLACEHOLDER_PATTERNS = [/^\.\.\.$/, /^fo1_\.\.\.$/, /^key-\.\.\.$/, /^whsec_\.\.\./]; + +export function findMissingVars( + example: Record, + actual: Record +): string[] { + const missing: string[] = []; + for (const [key, exampleVal] of Object.entries(example)) { + const actualVal = actual[key]; + if (!actualVal) { + missing.push(key); + continue; + } + if (PLACEHOLDER_PATTERNS.some(p => p.test(actualVal))) { + missing.push(key); + } else if (actualVal === exampleVal && PLACEHOLDER_PATTERNS.some(p => p.test(exampleVal))) { + missing.push(key); + } 
/**
 * Spawns a long-running dev service via `sh -c`, prefixing each line of
 * its output with a colored `[name]` tag, and registers it so the shared
 * SIGINT/SIGTERM handler can shut it down.
 */
export function spawnService(opts: { name: string; command: string; cwd: string }): Subprocess {
  // Install the Ctrl+C / kill cleanup handler once for the whole CLI process.
  if (!cleanupRegistered) {
    setupCleanup();
    cleanupRegistered = true;
  }

  const proc = Bun.spawn(['sh', '-c', opts.command], {
    cwd: opts.cwd,
    stdout: 'pipe',
    stderr: 'pipe',
    // FORCE_COLOR keeps child output colored even though it is piped.
    env: { ...process.env, FORCE_COLOR: '1' },
  });

  // Stream both stdio channels through the prefixed logger.
  // Intentionally not awaited: they run for the life of the child process.
  pipeWithPrefix(proc.stdout, opts.name);
  pipeWithPrefix(proc.stderr, opts.name);

  runningProcesses.push(proc);
  return proc;
}
opts.command; + console.log(`\x1b[90m$ ${label}\x1b[0m`); + + const proc = Bun.spawn(['sh', '-c', opts.command], { + cwd: opts.cwd, + stdout: 'inherit', + stderr: 'inherit', + env: { ...process.env, FORCE_COLOR: '1' }, + }); + + const code = await proc.exited; + return code === 0; +} + +export function killAll() { + for (const proc of runningProcesses) { + proc.kill(); + } + runningProcesses.length = 0; +} + +const COLORS = [ + '\x1b[36m', + '\x1b[33m', + '\x1b[35m', + '\x1b[32m', + '\x1b[34m', + '\x1b[91m', + '\x1b[92m', + '\x1b[93m', + '\x1b[94m', + '\x1b[95m', +]; +const RESET = '\x1b[0m'; +const colorMap = new Map(); +let colorIdx = 0; + +function nameToColor(name: string): string { + if (!colorMap.has(name)) { + colorMap.set(name, COLORS[colorIdx % COLORS.length]!); + colorIdx++; + } + return colorMap.get(name)!; +} diff --git a/dev/cli/src/utils/ui.ts b/dev/cli/src/utils/ui.ts new file mode 100644 index 0000000000..1763940d63 --- /dev/null +++ b/dev/cli/src/utils/ui.ts @@ -0,0 +1,29 @@ +export const bold = (s: string) => `\x1b[1m${s}\x1b[22m`; +export const dim = (s: string) => `\x1b[90m${s}\x1b[0m`; +export const green = (s: string) => `\x1b[32m${s}\x1b[0m`; +export const red = (s: string) => `\x1b[31m${s}\x1b[0m`; +export const yellow = (s: string) => `\x1b[33m${s}\x1b[0m`; +export const cyan = (s: string) => `\x1b[36m${s}\x1b[0m`; + +export function header(text: string) { + console.log(`\n${bold(cyan(`==> ${text}`))}`); +} + +export function success(text: string) { + console.log(green(` ✓ ${text}`)); +} + +export function warn(text: string) { + console.log(yellow(` ⚠ ${text}`)); +} + +export function error(text: string) { + console.error(red(` ✗ ${text}`)); +} + +export function table(rows: [string, string][]) { + const maxKey = Math.max(...rows.map(([k]) => k.length)); + for (const [key, val] of rows) { + console.log(` ${key.padEnd(maxKey)} ${dim(val)}`); + } +} diff --git a/dev/cli/test/env-check.test.ts b/dev/cli/test/env-check.test.ts new file mode 
/**
 * Creates all service .dev.vars AND their .dev.vars.example files so
 * that the per-service loop produces no warnings. The .dev.vars files
 * contain the same keys with non-placeholder values.
 */
async function createAllServiceEnvFiles() {
  // Only services that declare an envFile participate in env checks.
  const servicesWithEnv = services.filter(s => s.envFile);
  for (const svc of servicesWithEnv) {
    const dir = join(root, svc.dir);
    await mkdir(dir, { recursive: true });
    // Create example file with a non-placeholder value
    // (non-null `!` is safe: the filter above guarantees envFile is set).
    const exampleContent = 'SOME_KEY=real-value\n';
    await writeFile(join(dir, svc.envFile!), exampleContent);
    // Create actual .dev.vars with matching real value
    await writeFile(join(dir, '.dev.vars'), 'SOME_KEY=real-value\n');
  }
}
checks need attention'); + }); + + test('all prereqs present + all service envs OK → banner says "passed"', async () => { + await writeFile(join(root, '.env.local'), 'KEY=value'); + await mkdir(join(root, '.vercel'), { recursive: true }); + await writeFile(join(root, '.vercel', 'project.json'), '{}'); + await createAllServiceEnvFiles(); + + await envCheck(root); + + const output = allOutput(); + expect(output).toContain('All environment checks passed'); + expect(output).not.toContain('Some checks need attention'); + }); +}); diff --git a/dev/cli/test/env.test.ts b/dev/cli/test/env.test.ts new file mode 100644 index 0000000000..d67ab20187 --- /dev/null +++ b/dev/cli/test/env.test.ts @@ -0,0 +1,28 @@ +import { describe, expect, test } from 'bun:test'; +import { parseEnvFile, findMissingVars } from '../src/utils/env'; + +describe('env utilities', () => { + test('parseEnvFile parses KEY=value lines', () => { + const result = parseEnvFile('FOO=bar\nBAZ=qux\n'); + expect(result).toEqual({ FOO: 'bar', BAZ: 'qux' }); + }); + + test('parseEnvFile ignores comments and blank lines', () => { + const result = parseEnvFile('# comment\nFOO=bar\n\n# another\n'); + expect(result).toEqual({ FOO: 'bar' }); + }); + + test('parseEnvFile handles quoted values', () => { + const result = parseEnvFile('FOO="bar baz"\nQUX=\'hello\''); + expect(result).toEqual({ FOO: 'bar baz', QUX: 'hello' }); + }); + + test('findMissingVars detects placeholder values', () => { + const example = { FOO: '...', BAR: 'real-value', BAZ: 'fo1_...' }; + const actual = { FOO: '...', BAR: 'real-value', BAZ: 'fo1_...' 
}; + const missing = findMissingVars(example, actual); + expect(missing).toContain('FOO'); + expect(missing).toContain('BAZ'); + expect(missing).not.toContain('BAR'); + }); +}); diff --git a/dev/cli/test/logs.test.ts b/dev/cli/test/logs.test.ts new file mode 100644 index 0000000000..13a894fc94 --- /dev/null +++ b/dev/cli/test/logs.test.ts @@ -0,0 +1,45 @@ +import { describe, expect, test } from 'bun:test'; +import { services } from '../src/services/registry'; + +/** + * The `logs` command tails `docker compose logs -f <service>` for infra services. + * Only infra services whose devCommand starts with "docker compose" actually + * exist in docker-compose.yml. Non-compose infra services (e.g. migrations) + * must NOT be passed to `docker compose logs` — they'd fail silently or error. + * + * These tests validate the invariants that the logs command relies on. + */ +describe('logs command invariants', () => { + const infraServices = services.filter(s => s.type === 'infra'); + const composeInfra = infraServices.filter(s => s.devCommand?.startsWith('docker compose')); + const nonComposeInfra = infraServices.filter(s => !s.devCommand?.startsWith('docker compose')); + + test('migrations is an infra service with a non-compose devCommand', () => { + const migrations = services.find(s => s.name === 'migrations'); + expect(migrations).toBeDefined(); + expect(migrations!.type).toBe('infra'); + expect(migrations!.devCommand).toBe('pnpm drizzle migrate'); + expect(migrations!.devCommand!.startsWith('docker compose')).toBe(false); + }); + + test('postgres and redis are infra services with docker compose devCommands', () => { + for (const name of ['postgres', 'redis']) { + const svc = services.find(s => s.name === name); + expect(svc).toBeDefined(); + expect(svc!.type).toBe('infra'); + expect(svc!.devCommand!.startsWith('docker compose')).toBe(true); + } + }); + + test('at least one infra service is non-compose (guard against regression)', () => { + 
expect(nonComposeInfra.length).toBeGreaterThanOrEqual(1); + }); + + test('compose infra services have names matching docker-compose service names', () => { + // The logs command uses svc.name as the compose service name. + // Compose infra devCommands should contain `-d <service>` referencing the same service. + for (const svc of composeInfra) { + expect(svc.devCommand).toContain(svc.name); + } + }); +}); diff --git a/dev/cli/test/registry.test.ts b/dev/cli/test/registry.test.ts new file mode 100644 index 0000000000..6aca3954b2 --- /dev/null +++ b/dev/cli/test/registry.test.ts @@ -0,0 +1,48 @@ +import { describe, expect, test } from 'bun:test'; +import { services, getService, getServiceNames } from '../src/services/registry'; + +describe('service registry', () => { + test('all services have unique names', () => { + const names = services.map(s => s.name); + expect(new Set(names).size).toBe(names.length); + }); + + test('all services have unique ports (where defined)', () => { + const portsWithNames = services.filter(s => s.port).map(s => ({ name: s.name, port: s.port })); + const portMap = new Map<number, string[]>(); + for (const { name, port } of portsWithNames) { + portMap.set(port!, [...(portMap.get(port!) ?? []), name]); + } + const conflicts = [...portMap.entries()].filter(([, names]) => names.length > 1); + // NOTE: Some upstream wrangler.jsonc files have port conflicts (e.g. db-proxy + // and auto-fix both claim 8792, git-token and kiloclaw both claim 8795). + // The registry records the wrangler.jsonc ports as-is. These services are + // rarely run simultaneously, but if you need to, override the port in + // wrangler.jsonc with --port or in the registry here. + // This test documents known conflicts rather than failing on them. 
+ for (const [port, names] of conflicts) { + console.warn(` ⚠ Port ${port} claimed by: ${names.join(', ')}`); + } + }); + + test('all deps reference valid service names', () => { + const names = new Set(services.map(s => s.name)); + for (const svc of services) { + for (const dep of svc.deps) { + expect(names.has(dep)).toBe(true); + } + } + }); + + test('getService returns service by name', () => { + const svc = getService('nextjs'); + expect(svc).toBeDefined(); + expect(svc!.port).toBe(3000); + }); + + test('getServiceNames returns all names', () => { + const names = getServiceNames(); + expect(names).toContain('nextjs'); + expect(names).toContain('postgres'); + }); +}); diff --git a/dev/cli/test/resolver.test.ts b/dev/cli/test/resolver.test.ts new file mode 100644 index 0000000000..cbaec40354 --- /dev/null +++ b/dev/cli/test/resolver.test.ts @@ -0,0 +1,42 @@ +import { describe, expect, test } from 'bun:test'; +import { resolve } from '../src/services/resolver'; + +describe('dependency resolver', () => { + test("resolving 'nextjs' includes postgres, redis, migrations", () => { + const result = resolve(['nextjs']); + const names = result.map(s => s.name); + expect(names).toContain('postgres'); + expect(names).toContain('redis'); + expect(names).toContain('migrations'); + expect(names).toContain('nextjs'); + }); + + test('infra comes before apps in resolved order', () => { + const result = resolve(['nextjs']); + const names = result.map(s => s.name); + expect(names.indexOf('postgres')).toBeLessThan(names.indexOf('migrations')); + expect(names.indexOf('migrations')).toBeLessThan(names.indexOf('nextjs')); + }); + + test("resolving 'kiloclaw' includes nextjs and its deps", () => { + const result = resolve(['kiloclaw']); + const names = result.map(s => s.name); + expect(names).toContain('postgres'); + expect(names).toContain('nextjs'); + expect(names).toContain('kiloclaw'); + }); + + test('no duplicates in resolved set', () => { + const result = resolve(['kiloclaw', 
'cloud-agent']); + const names = result.map(s => s.name); + expect(new Set(names).size).toBe(names.length); + }); + + test('resolving unknown service throws', () => { + expect(() => resolve(['nonexistent'])).toThrow(); + }); + + test('resolving empty array returns empty', () => { + expect(resolve([])).toEqual([]); + }); +}); diff --git a/dev/cli/tsconfig.json b/dev/cli/tsconfig.json new file mode 100644 index 0000000000..70b356d086 --- /dev/null +++ b/dev/cli/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ESNext", + "module": "ESNext", + "moduleResolution": "bundler", + "types": ["bun-types"], + "strict": true, + "skipLibCheck": true, + "outDir": "dist", + "rootDir": "src", + "resolveJsonModule": true, + "esModuleInterop": true, + "declaration": false, + "noEmit": true + }, + "include": ["src/**/*.ts", "test/**/*.ts"] +} diff --git a/package.json b/package.json index ab29bf49ef..4029af0c10 100644 --- a/package.json +++ b/package.json @@ -32,7 +32,8 @@ "test:e2e:ui": "playwright test --ui", "test:e2e:debug": "playwright test --debug", "promo": "pnpm -s script src/scripts/encrypt-promo-codes.ts", - "dev:discord-gateway-cron": "tsx dev/discord-gateway-cron.ts" + "dev:discord-gateway-cron": "tsx dev/discord-gateway-cron.ts", + "kilo": "bun dev/cli/src/index.ts" }, "packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a", "dependencies": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 743550f0b6..25f57ac50b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1444,6 +1444,12 @@ importers: specifier: 'catalog:' version: 4.69.0(@cloudflare/workers-types@4.20260305.0) + dev/cli: + devDependencies: + bun-types: + specifier: ^1.3.10 + version: 1.3.10 + kiloclaw: dependencies: '@kilocode/db': diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 37f9491c43..17340165ed 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ 
-28,6 +28,7 @@ packages: - cloudflare-gastown - cloudflare-gastown/container - cloudflare-gmail-push + - dev/cli catalog: '@cloudflare/workers-types': ^4.20260305.0