From d99e5e686148251a75f3cea560e3d4ac5f2d8857 Mon Sep 17 00:00:00 2001 From: weroperking <139503221+weroperking@users.noreply.github.com> Date: Wed, 29 Apr 2026 16:13:33 +0000 Subject: [PATCH] refactor(cli): add IaC sync/generate commands, centralize config loading, improve auth check --- docs/testing/test-suite-upgrade.md | 946 ++++++++++++++++++++ packages/cli/src/commands/auth.ts | 4 +- packages/cli/src/commands/branch.ts | 433 ++++----- packages/cli/src/commands/generate.ts | 2 +- packages/cli/src/commands/graphql.ts | 2 +- packages/cli/src/commands/login.ts | 32 +- packages/cli/src/commands/migrate.ts | 247 +++-- packages/cli/src/commands/rls-test.ts | 9 +- packages/cli/src/commands/rls.ts | 20 +- packages/cli/src/commands/storage.ts | 47 +- packages/cli/src/commands/webhook.ts | 349 +++----- packages/cli/src/index.ts | 41 +- packages/cli/src/utils/config.ts | 64 ++ packages/cli/src/utils/context-generator.ts | 2 +- packages/cli/src/utils/schema-scanner.ts | 2 - packages/cli/test/error-messages.test.ts | 2 +- 16 files changed, 1440 insertions(+), 762 deletions(-) create mode 100644 docs/testing/test-suite-upgrade.md create mode 100644 packages/cli/src/utils/config.ts delete mode 100644 packages/cli/src/utils/schema-scanner.ts diff --git a/docs/testing/test-suite-upgrade.md b/docs/testing/test-suite-upgrade.md new file mode 100644 index 0000000..d9209d9 --- /dev/null +++ b/docs/testing/test-suite-upgrade.md @@ -0,0 +1,946 @@ +# BetterBase Comprehensive Test Suite Upgrade + +## 1) Objective (final outcome) + +Deliver a **production-grade test suite** for the BetterBase CLI (`@betterbase/cli`) that: + +- Replaces 31% meaningful coverage with **90%+ behavioral coverage** across all command modules +- Eliminates 8 stub files (every assertion is `expect(true).toBe(true)`) and 3 skeleton files (type checks only) +- Adds **integration tests against a real SQLite database** for data-dependent commands +- Adds **CLI argument parsing regression tests** that feed real 
argv arrays into Commander +- Adds **output format snapshot tests** to catch silent formatting regressions +- Adds **credential and auth lifecycle tests** covering the entire login→authenticated call path +- Establishes **shared test fixtures and harnesses** reusable across all command test files + +--- + +## 2) Scope map (what is covered) + +### Included (packages/cli — the `bb` command) + +| Layer | Files | Current State | +|-------|-------|---------------| +| Entry point / Commander wiring | `src/index.ts` | Smoke test only (checks command names exist) | +| Auth & credentials | `src/commands/login.ts`, `src/utils/credentials.ts`, `src/utils/api-client.ts` | **Zero coverage** — 16 stubs | +| Config loading utilities | `src/utils/config.ts` (new shared utility) | **Zero coverage** | +| All command modules | `src/commands/*.ts` (14 subcommands + IAC submodules) | 10 of 22 command files have meaningful tests | +| Dev infrastructure | `src/commands/dev.ts`, `src/commands/dev/*.ts` | Skeleton only (checks dir exists) | +| Utilities | `src/utils/*.ts` | Partial (logger, prompts, scanner tested; spinner, credentials, api-client untested) | + +### Excluded from this phase + +- `packages/core/` — Core SDK has its own test regime (not CLI's responsibility) +- `packages/server/` — Server package tested separately +- `packages/client/` — Client SDK tested separately +- `apps/dashboard/` — Frontend tested separately +- `templates/` — Template projects are not individually tested + +--- + +## 3) Current state assessment + +### 3.1 Overall health + +| Metric | Value | +|--------|-------| +| Total test files | 26 | +| Files with real assertions (Good) | 8 (31%) | +| Files partially tested | 7 (27%) | +| Skeleton files (barely tests anything) | 3 (12%) | +| Stub files (`expect(true).toBe(true)`) | 8 (31%) | +| Source files with any meaningful coverage | 10 of ~37 (27%) | +| Test-to-source ratio | Meaningful tests cover only happy-path of 10 files | + +### 3.2 Per-file quality 
matrix + +#### Tier 1 — Good (keep as-is, add edge cases) + +| File | What It Tests | Quality Notes | +|------|--------------|---------------| +| `migrate-utils.test.ts` | `calculateChecksum`, `parseMigrationFilename`, `getDatabaseType`, `getMigrationsTableSql` | Full coverage of all 4 exports. Determinism, edge cases, env var save/restore. | +| `migrate.test.ts` | `splitStatements`, `analyzeMigration` | Full coverage. Quoted semicolons, case-insensitive, IF NOT EXISTS variants. | +| `prompts.test.ts` | `prompts.text`, `prompts.confirm`, `prompts.select` | Good edge cases. Empty validation, invalid defaults. | +| `provider-prompts.test.ts` | `generateEnvContent`, `generateEnvExampleContent` | Full string content assertions across all providers. | +| `migrate-from-convex.test.ts` | `runMigrateFromConvex` | Validates JSON report structure, compatibility blockers, severity levels. | +| `context-generator.test.ts` | `ContextGenerator.generate()` | Tests full/missing/empty schema and routes. Validates persisted JSON. | +| `generate-crud.test.ts` | `runGenerateCrudCommand` | Scaffolds temp project, validates file content, error cases. | +| `edge-cases.test.ts` | SchemaScanner, RouteScanner, ContextGenerator with malformed inputs | Good edge coverage: empty files, syntax errors, long names, deeply nested code. 
| + +#### Tier 2 — Partial (expand scope) + +| File | Current Coverage | Missing | +|------|-----------------|---------| +| `auth-command.test.ts` | `runAuthSetupCommand` only | `runAuthAddProviderCommand` has zero coverage | +| `route-scanner.test.ts` | 1 scenario (GET + POST with auth) | Need: PATCH/DELETE, no-auth routes, malformed decorators, nested route groups | +| `scanner.test.ts` | 1 scenario (3 tables) | Need: empty tables, no relations, circular FK, array columns, enums | +| `logger.test.ts` | "doesn't crash" only | Need: actual stderr/stdout capture, format verification, unicode boundary tests | +| `error-messages.test.ts` | String content of simulated errors | Need: exercise actual error-producing code paths, not mock strings | +| `graphql-type-map.test.ts` | 28 tests BUT duplicates the function locally | Fix: import `drizzleTypeToGraphQL` from `src/commands/graphql.ts` | +| `iac-commands.test.ts` | Mock literals only | Fix: import and exercise `runIacAnalyze`, `runIacExport`, `runIacImport` | +| `smoke.test.ts` | Checks command names exist | Expand: verify full subcommand tree, option names, help text content | + +#### Tier 3 — Skeleton (rewrite completely) + +| File | What It Does Now | What It Should Do | +|------|-----------------|-------------------| +| `dev.test.ts` | Creates/deletes temp dirs | Start dev server, verify it boots, send HTTP request, test graceful shutdown | +| `init.test.ts` | Checks type shape | Scaffold a real project with `runInitCommand`, verify all expected files exist | +| `login-commands.test.ts` | 16 stubs of `expect(true).toBe(true)` | Test credential save/load, device code flow mock, token validation, logout | + +#### Tier 4 — Stub (rewrite from scratch) + +These 8 files contain **zero real assertions** against source code: + +| File | Stub Count | Will Be Replaced With | +|------|-----------|----------------------| +| `branch-commands.test.ts` | 17 | Config load → branch create/list/delete/sleep/wake lifecycle tests | 
+| `function-commands.test.ts` | 10 | Function create/dev/build/list/logs/deploy tests | +| `login-commands.test.ts` | 16 | Credential lifecycle + auth flow tests | +| `rls-commands.test.ts` | 13 | Policy create/list/disable tests | +| `rls-test-command.test.ts` | 7 | RLS evaluation with real PostgreSQL schema | +| `storage-commands.test.ts` | 11 | Storage init (prompt flow), bucket list, file upload | +| `webhook-commands.test.ts` | 17 | Webhook create/list/test/logs lifecycle tests | +| `auth-commands.test.ts` | 9 | (Merged into auth-command.test.ts — this file is redundant) | + +--- + +## 4) Source files with zero test coverage + +| Source File | Reason Untested | Priority | +|-------------|----------------|----------| +| `src/commands/webhook.ts` | Stub file only | P0 | +| `src/commands/branch.ts` | Stub file only | P0 | +| `src/commands/rls-test.ts` | Stub file only | P1 | +| `src/commands/login.ts` | Stub file only | P0 | +| `src/commands/storage.ts` | Stub file only | P1 | +| `src/commands/rls.ts` | Stub file only | P1 | +| `src/commands/function.ts` | Stub file only | P0 | +| `src/commands/graphql.ts` | `graphql-type-map.test.ts` duplicates the function — doesn't import source | P2 | +| `src/commands/dev.ts` | Skeleton only | P0 | +| `src/commands/init.ts` | Skeleton only | P2 | +| `src/commands/auth.ts` (addProvider) | Only `runAuthSetupCommand` is tested | P1 | +| `src/commands/iac/sync.ts` | Never imported by any test | P1 | +| `src/commands/iac/generate.ts` | Never imported by any test | P1 | +| `src/commands/iac/export.ts` | Stub literals only | P3 | +| `src/commands/iac/import.ts` | Stub literals only | P3 | +| `src/commands/iac/analyze.ts` | Stub literals only | P2 | +| `src/commands/dev/process-manager.ts` | No dev tests | P0 | +| `src/commands/dev/watcher.ts` | No dev tests | P0 | +| `src/commands/dev/error-formatter.ts` | No dev tests | P2 | +| `src/commands/dev/query-log.ts` | No dev tests | P3 | +| `src/commands/auth-providers.ts` | Only called 
internally by `auth.ts` | P2 | +| `src/utils/api-client.ts` | Credentials/network dependency | P0 | +| `src/utils/credentials.ts` | File I/O never tested | P0 | +| `src/utils/config.ts` | New shared utility, untested | P0 | +| `src/utils/spinner.ts` | Never tested | P3 | +| `src/build.ts` | Build script | P3 | + +--- + +## 5) Categories of testing that are completely absent + +### 5.1 Authentication/credential lifecycle (P0) + +**Impact:** Every authenticated command path is untestable. + +The entire login flow (`bb login`, `bb login --email`, device code OAuth, API key login, token storage, `isAuthenticated`, `clearCredentials`, credential file read/write with Zod schema validation) has zero test coverage. The `api-client.ts` module (the authenticated `apiRequest()` wrapper called by `branch.ts`, `webhook.ts`, `storage.ts`, `function.ts`) is also untested. + +**What's needed:** +- Unit tests for `saveCredentials` / `loadCredentials` / `clearCredentials` against temp `~/.betterbase/credentials.json` +- Unit tests for Zod credential schema validation (corrupt JSON, missing fields, expired timestamps) +- Unit tests for `isAuthenticated` with present/absent/expired credentials +- Mocked fetch tests for `runLoginCommand` device code flow (device/code → device/token → admin/auth/me) +- Mocked fetch tests for `runApiKeyLogin` admin/auth/login flow +- Mocked fetch tests for `apiRequest()` with valid/invalid/expired tokens + +### 5.2 CLI argument parsing regression (P0) + +**Impact:** No test verifies that Commander correctly parses user input into command functions. + +All 14+ subcommands register arguments and options via Commander. Zero tests feed argv arrays into `createProgram().parseAsync()` and assert parsed values. 
+
+**What's needed:**
+- For each major subcommand, a matrix test that feeds known argv arrays and validates:
+  - Required arguments arrive at the correct positional position
+  - Optional arguments default to the documented value
+  - Boolean flags (`--force`, `--sync-env`, `--dry-run`, `--debug`) parse correctly
+  - String/number options (`--output <file>`, `--steps <n>`, `--limit <n>`) parse correctly
+  - Unknown commands and missing required args produce Commander errors
+  - `--help` output contains expected subcommand descriptions
+
+### 5.3 Output format snapshot tests (P1)
+
+**Impact:** The recent UX polish pass changed output formatting across 15 files. No test verifies the formatted output hasn't regressed.
+
+Commands like `bb migrate preview`, `bb webhook list`, `bb rls list`, `bb branch list`, `bb function list` produce structured terminal output. If a formatting change breaks alignment, color coding, or symbol rendering, no test will catch it.
+
+**What's needed:**
+- Capture stdout from command functions (not subprocess — just string output)
+- Assert exact format for table headers, column alignment, color codes, logger symbols
+- Snapshot approach: save golden output strings, diff on change
+- Cover at minimum: migrate preview, webhook list, webhook logs, branch list, function list, rls list, storage list
+
+### 5.4 Config file discovery and validation (P1)
+
+**Impact:** `utils/config.ts` has `findConfigFile`, `loadConfig`, `readConfigFile` — all untested. This single module feeds every command that reads `betterbase.config.ts`.
+ +**What's needed:** +- `findConfigFile` discovers `.ts` / `.js` / `.mts` variants in order +- `loadConfig` correctly parses known-good config files +- `loadConfig` rejects malformed configs with expected Zod error shape +- `loadConfig` handles dynamic `import()` failures gracefully (missing file, syntax error) +- `readConfigFile` returns raw content for config mutation commands + +### 5.5 SQLite in-memory integration harness (P2) + +**Impact:** Migration commands, webhook log queries, RLS tests, and branch management all operate on database state. No test verifies actual data persistence, query correctness, or schema changes. + +**What's needed:** +- A shared test fixture (`test/fixtures/database.ts`) that: + - Creates a `:memory:` SQLite database + - Runs a known Drizzle schema (tables, indexes, foreign keys) + - Inserts seed data + - Exposes `db` and `schema` for command functions to use + - Cleans up after each test +- Apply migration SQL and verify table structure changed +- Query webhook delivery logs from a real `_betterbase_webhook_deliveries` table +- Test `getDatabaseConnection` with SQLite and PostgreSQL connection strings + +### 5.6 Dev server lifecycle tests (P2) + +**Impact:** `bb dev` is the most complex command (ProcessManager, DevWatcher, ContextGenerator, IAC sync/generate orchestration). The existing `dev.test.ts` only checks directory existence. + +**What's needed:** +- Start the dev server process, verify it binds to the expected port +- Send an HTTP GET to `localhost:3000/health` and assert 200 +- Trigger a file change in `src/db/schema.ts`, verify context regeneration +- Trigger a file change in `betterbase/schema.ts`, verify IAC sync + server restart +- Send SIGTERM and verify graceful shutdown (cleanup called, ports released) +- Verify query log enable/disable via `QUERY_LOG` env var + +### 5.7 Webhook lifecycle tests (P2) + +**Impact:** `webhook.ts` has 17 stub tests. 
The webhook create command mutates `betterbase.config.ts` with regex-based string manipulation — highly regression-prone. + +**What's needed:** +- Create a webhook entry in a temp config file, verify the config file content +- Add a second webhook, verify both entries exist +- List webhooks from a known config, verify output format +- Test webhook dispatch with mocked `WebhookDispatcher` +- Query webhook delivery logs from SQLite in-memory DB +- Test missing env var error paths (both URL and secret) + +### 5.8 Branch management lifecycle tests (P2) + +**Impact:** `branch.ts` has 17 stub tests. + +**What's needed:** +- Load a known `betterbase.config.ts`, create a branch via `createBranchManager` +- List branches, verify count and metadata +- Delete a branch, verify it's removed from the manager +- Sleep/wake transitions, verify status change +- Status command returns expected shape +- Error paths: missing config file, unknown branch name, duplicate branch name + +### 5.9 RLS policy lifecycle tests (P2) + +**Impact:** `rls.ts` and `rls-test.ts` have 20 stub tests combined. + +**What's needed:** +- Create a policy file for a table, verify file content matches template +- List policy files from a directory +- Disable instruction output verification +- Duplicate policy creation warning +- `runRLSTestCommand` with a real PostgreSQL schema and RLS policies +- Verify test results JSON structure +- Verify cleanup: test schema is dropped after test + +### 5.10 Function lifecycle tests (P2) + +**Impact:** `function.ts` has 10 stub tests. 
+ +**What's needed:** +- Create a function directory, verify generated `index.ts` and `config.ts` content +- List functions from a project directory +- Build a function, verify output bundle exists +- Deploy with mocked `deployToCloudflare` / `deployToVercel` +- Logs command with mocked `getCloudflareLogs` / `getVercelLogs` +- Error paths: duplicate function name, missing function, invalid name characters + +### 5.11 Storage bucket lifecycle tests (P2) + +**Impact:** `storage.ts` has 11 stub tests. + +**What's needed:** +- Storage init: prompt flow verification (provider selection → credential prompts → config file update) +- Config file mutation: verify `betterbase.config.ts` is updated with the correct storage block +- Env file mutation: verify `.env` gets storage credentials +- Gitignore update: verify `.gitignore` gets provider-specific patterns +- List buckets with mocked `createS3Adapter` +- Upload file with mocked adapter, verify public URL generation + +### 5.12 IAC workflow integration tests (P3) + +**Impact:** `iac/sync.ts`, `iac/generate.ts`, `iac/export.ts`, `iac/import.ts`, `iac/analyze.ts` have no real source imports in tests. + +**What's needed:** +- `runIacSync`: load a `betterbase/schema.ts`, compare with serialized schema, verify migration SQL generated +- `runIacGenerate`: discover functions from `betterbase/`, verify `api.d.ts` content +- `runIacAnalyze`: scan query files, verify complexity analysis output +- `runIacExport` / `runIacImport`: verify placeholder output format (until server integration) + +### 5.13 Spinner utility tests (P3) + +**Impact:** `withSpinner` is used by every command that performs async work. Untested. 
+ +**What's needed:** +- `createSpinner` returns configured Ora instance +- `withSpinner` calls task, persists on success +- `withSpinner` catches error, persists with failText, re-throws +- Timer behavior: elapsed time formatting + +### 5.14 End-to-end binary tests (P3) + +**Impact:** No test spawns the actual compiled `bb` binary. + +**What's needed:** +- `bun run dist/index.js --version` → exits 0, stdout contains version +- `bun run dist/index.js --help` → exits 0, stdout contains subcommand list +- `bun run dist/index.js init --help` → exits 0 +- `bun run dist/index.js unknown-command` → exits non-zero + +### 5.15 GraphQL command tests (P3) + +**Impact:** `commands/graphql.ts` is untested. The type-map test duplicates the function rather than importing from source. + +**What's needed:** +- Fix `graphql-type-map.test.ts` to import `drizzleTypeToGraphQL` from `src/commands/graphql.ts` +- Test `generateSDL` and `generateServerSetup` with known table inputs +- Test `runGraphqlPlaygroundCommand` with mocked health endpoint and platform-specific open commands +- Test `runGenerateGraphqlCommand` with a real schema file → verify `.graphql` output + +--- + +## 6) Prioritized implementation plan + +### Phase 1: Foundation (P0 — weeks 1-2) + +**Goal:** Unlock testing of all authenticated commands and config-dependent paths. 
+ +| # | Task | Effort | Depends On | +|---|------|--------|------------| +| 1.1 | Create shared test fixtures module (`test/fixtures.ts`) with temp directory scaffolding, schema file generators, config file generators | Medium | — | +| 1.2 | Create shared SQLite in-memory harness (`test/fixtures/database.ts`) | Medium | 1.1 | +| 1.3 | Write `credentials.test.ts` (save, load, clear, corrupt file, expired token, Zod validation) | Small | 1.1 | +| 1.4 | Write `api-client.test.ts` with mocked fetch (valid token, expired token, network error, 401 response) | Small | 1.1 | +| 1.5 | Write `login-commands.test.ts` (device code flow mock, API key login mock, logout, `isAuthenticated`) | Medium | 1.3, 1.4 | +| 1.6 | Write `config.test.ts` (`findConfigFile` discovery, `loadConfig` parse/reject, `readConfigFile` raw content) | Small | 1.1 | +| 1.7 | Write CLI argument parsing matrix tests (`test/cli-parsing.test.ts`) | Medium | — | + +### Phase 2: Stub replacement (P0-P1 — weeks 2-4) + +**Goal:** Replace all 8 stub test files with real behavioral tests. 
+ +| # | Task | Effort | Depends On | +|---|------|--------|------------| +| 2.1 | Rewrite `branch-commands.test.ts` (17 tests → real config load, CRUD lifecycle, sleep/wake, error paths) | Medium | 1.6 | +| 2.2 | Rewrite `webhook-commands.test.ts` (17 tests → create/list/test/dispatch/logs with SQLite) | Medium | 1.2, 1.6 | +| 2.3 | Rewrite `function-commands.test.ts` (10 tests → create/dev/build/list/deploy/logs) | Medium | 1.1 | +| 2.4 | Rewrite `storage-commands.test.ts` (11 tests → init prompt flow, config/env mutation, upload/list) | Medium | 1.1, 1.6 | +| 2.5 | Rewrite `rls-commands.test.ts` (13 tests → create/list/disable, duplicate warning, PostgreSQL test schema) | Medium | 1.1, 1.2 | +| 2.6 | Rewrite `rls-test-command.test.ts` (7 tests → RLS evaluation, test results JSON, schema cleanup) | Large | 1.2 | +| 2.7 | Delete `auth-commands.test.ts` (9 stubs, redundant with `auth-command.test.ts`) | Trivial | — | +| 2.8 | Rewrite `dev.test.ts` (3 skeleton tests → full dev server lifecycle) | Large | 1.1 | + +### Phase 3: Coverage expansion (P1-P2 — weeks 4-6) + +**Goal:** Deepen existing partial tests and add missing integration scenarios. 
+ +| # | Task | Effort | Depends On | +|---|------|--------|------------| +| 3.1 | Expand `scanner.test.ts` (1 scenario → 5+: empty tables, no relations, circular FK, array columns, enums, large schemas) | Small | — | +| 3.2 | Expand `route-scanner.test.ts` (1 scenario → 5+: PATCH/DELETE, no-auth routes, nested route groups, malformed decorators) | Small | — | +| 3.3 | Expand `logger.test.ts` (add stdout/stderr capture, format verification, color code output, unicode boundary) | Small | — | +| 3.4 | Fix `graphql-type-map.test.ts` (import from source instead of duplicating function) | Small | — | +| 3.5 | Expand `iac-commands.test.ts` (import and exercise runIacAnalyze, runIacExport, runIacImport from source) | Medium | 1.1 | +| 3.6 | Add output format snapshot tests for all list/render commands | Medium | 1.1 | +| 3.7 | Add `auth-providers.test.ts` (verify all 7 provider templates, env var generation, config code structure) | Small | — | + +### Phase 4: Deep integration (P2-P3 — weeks 6-8) + +**Goal:** End-to-end flows, spinners, build verification, and edge hardening. 
| # | Task | Effort | Depends On |
+|---|------|--------|------------|
+| 4.1 | Dev server integration tests (start → health check → file change → restart → shutdown) | Large | 1.1, 2.8 |
+| 4.2 | IAC workflow integration (sync → generate → analyze — full pipeline) | Medium | 1.1 |
+| 4.3 | `spinner.test.ts` (createSpinner, withSpinner success/failure/timer) | Small | — |
+| 4.4 | End-to-end binary smoke tests (spawn `bb`, verify exit codes and stdout) | Small | — |
+| 4.5 | Migrate cross-product integration (migrate → graphql regenerate → context regenerate) | Medium | 1.2 |
+
+---
+
+## 7) Shared test infrastructure to build
+
+### 7.1 Fixture module (`test/fixtures.ts`)
+
+```typescript
+// test/fixtures.ts
+import { mkdirSync, rmSync, writeFileSync } from "node:fs";
+import { join } from "node:path";
+import { tmpdir } from "node:os";
+import { randomUUID } from "node:crypto";
+
+export interface TestProject {
+  root: string;
+  cleanup: () => void;
+}
+
+export function createTestProject(files?: Record<string, string>): TestProject {
+  const root = join(tmpdir(), `bb-test-${randomUUID().slice(0, 8)}`);
+  mkdirSync(root, { recursive: true });
+
+  if (files) {
+    for (const [relPath, content] of Object.entries(files)) {
+      const absPath = join(root, relPath);
+      mkdirSync(join(absPath, ".."), { recursive: true });
+      writeFileSync(absPath, content);
+    }
+  }
+
+  return {
+    root,
+    cleanup: () => {
+      rmSync(root, { recursive: true, force: true });
+    },
+  };
+}
+
+export function createMinimalSchema(): string {
+  return `
+import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
+
+export const users = sqliteTable("users", {
+  id: text("id").primaryKey(),
+  name: text("name").notNull(),
+  email: text("email").notNull().unique(),
+  age: integer("age"),
+  createdAt: integer("created_at", { mode: "timestamp" }).notNull(),
+});
+
+export const posts = sqliteTable("posts", {
+  id: text("id").primaryKey(),
+  title: text("title").notNull(),
+  content: text("content"),
+  userId: 
text("user_id").references(() => users.id),
+  createdAt: integer("created_at", { mode: "timestamp" }).notNull(),
+});
+`;
+}
+
+export function createMinimalConfig(overrides?: Record<string, unknown>): string {
+  return `
+import { defineConfig } from "@betterbase/core";
+
+export default defineConfig({
+  project: { name: "test-project" },
+  ${overrides ? JSON.stringify(overrides, null, 2).slice(1, -1) : ""}
+});
+`;
+}
+```
+
+### 7.2 Database harness (`test/fixtures/database.ts`)
+
+```typescript
+// test/fixtures/database.ts
+import { Database } from "bun:sqlite";
+
+export interface TestDatabase {
+  db: Database;
+  cleanup: () => void;
+}
+
+export function createTestDatabase(): TestDatabase {
+  const db = new Database(":memory:");
+
+  // Create migrations tracking table
+  db.run(`
+    CREATE TABLE IF NOT EXISTS _betterbase_migrations (
+      id INTEGER PRIMARY KEY AUTOINCREMENT,
+      name TEXT NOT NULL UNIQUE,
+      applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+      checksum TEXT NOT NULL
+    )
+  `);
+
+  // Create webhook deliveries table
+  db.run(`
+    CREATE TABLE IF NOT EXISTS _betterbase_webhook_deliveries (
+      id TEXT PRIMARY KEY,
+      webhook_id TEXT NOT NULL,
+      status TEXT NOT NULL,
+      request_url TEXT,
+      response_code INTEGER,
+      response_body TEXT,
+      error TEXT,
+      attempt_count INTEGER NOT NULL DEFAULT 1,
+      created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+      updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+    )
+  `);
+
+  return {
+    db,
+    cleanup: () => db.close(),
+  };
+}
+
+export function seedMigrationTracking(
+  db: Database,
+  migrations: { name: string; checksum: string }[],
+): void {
+  const stmt = db.prepare(
+    "INSERT INTO _betterbase_migrations (name, checksum) VALUES (?, ?)",
+  );
+  for (const m of migrations) {
+    stmt.run(m.name, m.checksum);
+  }
+}
+
+export function seedWebhookDeliveries(
+  db: Database,
+  deliveries: {
+    id: string;
+    webhook_id: string;
+    status: string;
+    response_code?: number;
+    error?: string;
+  }[],
+): void {
+  const stmt = 
db.prepare( + `INSERT INTO _betterbase_webhook_deliveries + (id, webhook_id, status, response_code, error, attempt_count) + VALUES (?, ?, ?, ?, ?, 1)`, + ); + for (const d of deliveries) { + stmt.run(d.id, d.webhook_id, d.status, d.response_code ?? null, d.error ?? null); + } +} +``` + +### 7.3 Config fixture generator + +```typescript +// test/fixtures/config.ts +import { createTestProject } from "./fixtures"; + +export const VALID_CONFIG_TS = ` +import { defineConfig } from "@betterbase/core"; + +export default defineConfig({ + project: { name: "test-project" }, + provider: { + type: "sqlite" as const, + connectionString: "local.db", + }, + storage: { + provider: "s3" as const, + bucket: "test-bucket", + region: "us-east-1", + }, + webhooks: [], +}); +`; + +export const CONFIG_WITH_WEBHOOKS = ` +import { defineConfig } from "@betterbase/core"; + +export default defineConfig({ + project: { name: "test-project" }, + webhooks: [ + { + id: "webhook-abc123", + table: "users", + events: ["INSERT", "UPDATE"], + url: "process.env.WEBHOOK_USERS_URL", + secret: "process.env.WEBHOOK_SECRET", + enabled: true, + }, + ], +}); +`; + +export const INVALID_CONFIG_TS = ` +export default { + project: { name: "test-project" }, + provider: { + type: "invalid-provider", + }, +}; +`; + +export function createConfigProject( + configContent: string = VALID_CONFIG_TS, +) { + return createTestProject({ + "betterbase.config.ts": configContent, + "package.json": JSON.stringify({ name: "test-project" }), + }); +} +``` + +### 7.4 Credential fixture generator + +```typescript +// test/fixtures/credentials.ts +import { join } from "node:path"; +import { mkdirSync, writeFileSync, rmSync, existsSync } from "node:fs"; +import { randomUUID } from "node:crypto"; +import { homedir } from "node:os"; + +const BETTERBASE_DIR = join(homedir(), ".betterbase"); +const CREDENTIALS_FILE = join(BETTERBASE_DIR, "credentials.json"); + +export interface CredentialFixture { + token: string; + admin_email: string; 
+  server_url: string;
+  created_at: string;
+}
+
+export function setupCredentialsFile(
+  credentials: CredentialFixture,
+): () => void {
+  mkdirSync(BETTERBASE_DIR, { recursive: true });
+  writeFileSync(CREDENTIALS_FILE, JSON.stringify(credentials));
+
+  return () => {
+    if (existsSync(CREDENTIALS_FILE)) {
+      rmSync(CREDENTIALS_FILE);
+    }
+  };
+}
+
+export function createValidCredentials(): CredentialFixture {
+  return {
+    token: `token_${randomUUID()}`,
+    admin_email: "admin@test.com",
+    server_url: "https://api.betterbase.io",
+    created_at: new Date().toISOString(),
+  };
+}
+
+export function createExpiredCredentials(): CredentialFixture {
+  return {
+    token: "expired_token",
+    admin_email: "admin@test.com",
+    server_url: "https://api.betterbase.io",
+    created_at: new Date(Date.now() - 365 * 24 * 60 * 60 * 1000).toISOString(),
+  };
+}
+```
+
+### 7.5 Fetch mock harness
+
+```typescript
+// test/fixtures/fetch-mock.ts
+
+export interface MockFetchRoute {
+  method?: string;
+  url: string | RegExp;
+  status: number;
+  body: unknown;
+  headers?: Record<string, string>;
+}
+
+export function mockFetch(
+  routes: MockFetchRoute[],
+): typeof globalThis.fetch & { calls: Request[] } {
+  const calls: Request[] = [];
+
+  const mock = async (input: RequestInfo | URL, init?: RequestInit) => {
+    const url = typeof input === "string" ? input : input instanceof URL ? input.href : input.url;
+    const method = init?.method ?? "GET";
+    const request = new Request(input instanceof Request ? input : url, init);
+    calls.push(request);
+
+    for (const route of routes) {
+      const urlMatch =
+        typeof route.url === "string"
+          ? 
url.includes(route.url)
+          : route.url.test(url);
+      const methodMatch = !route.method || route.method === method;
+
+      if (urlMatch && methodMatch) {
+        return new Response(JSON.stringify(route.body), {
+          status: route.status,
+          headers: {
+            "Content-Type": "application/json",
+            ...route.headers,
+          },
+        });
+      }
+    }
+
+    return new Response(JSON.stringify({ error: "unmocked" }), {
+      status: 404,
+      headers: { "Content-Type": "application/json" },
+    });
+  };
+
+  (mock as unknown as { calls: Request[] }).calls = calls;
+  return mock as typeof globalThis.fetch & { calls: Request[] };
+}
+```
+
+---
+
+## 8) Output format snapshot testing pattern
+
+```typescript
+// test/snapshots/migrate-preview.txt (golden file)
+// Generated 2026-04-29 — bb migrate preview with 2 tables, 1 modified column
+
+// test/output-snapshots.test.ts
+import { describe, expect, it, beforeAll, afterAll } from "bun:test";
+import { readFileSync, writeFileSync } from "node:fs";
+import { join } from "node:path";
+
+const SNAPSHOTS_DIR = join(import.meta.dir, "snapshots");
+const UPDATE_SNAPSHOTS = process.env.UPDATE_SNAPSHOTS === "true";
+
+async function captureOutput(fn: () => Promise<void>): Promise<string> {
+  const originalLog = console.log;
+  const originalError = console.error;
+  const originalWarn = console.warn;
+  const lines: string[] = [];
+
+  console.log = (...args: unknown[]) => {
+    lines.push(args.map((a) => String(a)).join(" "));
+  };
+  console.error = (...args: unknown[]) => {
+    lines.push(args.map((a) => String(a)).join(" "));
+  };
+  console.warn = (...args: unknown[]) => {
+    lines.push(args.map((a) => String(a)).join(" "));
+  };
+
+  try {
+    await fn();
+  } finally {
+    console.log = originalLog;
+    console.error = originalError;
+    console.warn = originalWarn;
+  }
+
+  return lines.join("\n");
+}
+
+function assertSnapshot(name: string, actual: string): void {
+  const snapshotPath = join(SNAPSHOTS_DIR, `${name}.txt`);
+
+  if (UPDATE_SNAPSHOTS) {
+    writeFileSync(snapshotPath, actual);
+    return;
+  }
+
+ let expected: string; + try { + expected = readFileSync(snapshotPath, "utf-8"); + } catch { + throw new Error( + `Snapshot "${name}" not found. Run with UPDATE_SNAPSHOTS=true to generate.`, + ); + } + + expect(actual.trim()).toBe(expected.trim()); +} +``` + +Usage: + +```typescript +describe("bb migrate preview output", () => { + it("matches snapshot for 2-table schema", async () => { + const output = await captureOutput(async () => { + const changes: MigrationChange[] = [ + { type: "create_table", table: "users", isDestructive: false }, + { type: "create_table", table: "posts", isDestructive: false }, + { type: "modify_column", table: "posts", column: "title", isDestructive: false }, + ]; + displayDiff(changes); + }); + assertSnapshot("migrate-preview-2-tables", output); + }); +}); +``` + +--- + +## 9) Test file structure target (post-upgrade) + +``` +packages/cli/test/ +├── fixtures/ +│ ├── fixtures.ts # TestProject, createMinimalSchema, createMinimalConfig +│ ├── database.ts # createTestDatabase, seed helpers +│ ├── config.ts # Config file generators (valid, with webhooks, invalid) +│ ├── credentials.ts # credential file setup/teardown +│ └── fetch-mock.ts # MockFetchRoute, mockFetch() +├── snapshots/ +│ ├── migrate-preview-2-tables.txt +│ ├── webhook-list.txt +│ ├── webhook-logs.txt +│ ├── branch-list.txt +│ ├── function-list.txt +│ └── rls-list.txt +├── unit/ +│ ├── migrate-utils.test.ts # ✓ Keep (good) +│ ├── migrate.test.ts # ✓ Keep (good) +│ ├── prompts.test.ts # ✓ Keep (good) +│ ├── provider-prompts.test.ts # ✓ Keep (good) +│ ├── logger.test.ts # ✏ Expand (capture stdout) +│ ├── spinner.test.ts # ✚ New +│ ├── scanner.test.ts # ✏ Expand (5+ scenarios) +│ ├── route-scanner.test.ts # ✏ Expand (5+ scenarios) +│ ├── graphql-type-map.test.ts # ✏ Fix (import from source) +│ ├── credentials.test.ts # ✚ New +│ ├── api-client.test.ts # ✚ New +│ ├── config.test.ts # ✚ New +│ └── auth-providers.test.ts # ✚ New +├── integration/ +│ ├── init.test.ts # ✚ Rewrite 
(scaffold project) +│ ├── dev.test.ts # ✚ Rewrite (server lifecycle) +│ ├── migrate-from-convex.test.ts # ✓ Keep (good) +│ ├── context-generator.test.ts # ✓ Keep (good) +│ ├── generate-crud.test.ts # ✓ Keep (good) +│ ├── edge-cases.test.ts # ✓ Keep (good) +│ ├── auth-command.test.ts # ✏ Expand (addProvider) +│ ├── login-commands.test.ts # ✚ Rewrite (credential lifecycle) +│ ├── webhook-commands.test.ts # ✚ Rewrite (full lifecycle) +│ ├── branch-commands.test.ts # ✚ Rewrite (full lifecycle) +│ ├── function-commands.test.ts # ✚ Rewrite (full lifecycle) +│ ├── storage-commands.test.ts # ✚ Rewrite (full lifecycle) +│ ├── rls-commands.test.ts # ✚ Rewrite (full lifecycle) +│ ├── rls-test-command.test.ts # ✚ Rewrite (PG schema) +│ ├── iac-commands.test.ts # ✏ Expand (import from source) +│ └── iac-workflow.test.ts # ✚ New (sync → generate pipeline) +├── e2e/ +│ └── binary-smoke.test.ts # ✚ New +├── cli/ +│ ├── smoke.test.ts # ✏ Expand (full subcommand tree) +│ ├── cli-parsing.test.ts # ✚ New (argv → parsed options) +│ └── output-snapshots.test.ts # ✚ New (format regression) +└── error-messages.test.ts # ✏ Expand (real code paths) +``` + +Legend: ✓ Keep | ✏ Expand | ✚ New + +--- + +## 10) Acceptance criteria + +### Phase 1 (Foundation) +- [ ] `credentials.test.ts` passes: save, load, clear, corrupt file, expired, Zod validation +- [ ] `api-client.test.ts` passes: valid token, expired token, 401, network error +- [ ] `login-commands.test.ts` passes: device code mock, API key mock, logout, isAuthenticated +- [ ] `config.test.ts` passes: findConfigFile discovers .ts/.js/.mts, loadConfig parses/rejects +- [ ] `cli-parsing.test.ts` passes: 10+ subcommands with argv matrix +- [ ] Zero `expect(true).toBe(true)` assertions remain in the codebase + +### Phase 2 (Stub replacement) +- [ ] All 8 stub files replaced with real behavioral tests +- [ ] `branch-commands.test.ts`: create → list → status → sleep → wake → delete lifecycle +- [ ] `webhook-commands.test.ts`: create → list → 
test dispatch → query logs +- [ ] `function-commands.test.ts`: create → dev → build → list → deploy → logs +- [ ] `storage-commands.test.ts`: init prompt flow → config mutation → upload → list +- [ ] `rls-commands.test.ts`: create → list → disable → duplicate warning +- [ ] `rls-test-command.test.ts`: PostgreSQL RLS evaluation → JSON results → cleanup +- [ ] `dev.test.ts`: start server → health check → file change → shutdown +- [ ] `auth-commands.test.ts` deleted (redundant) + +### Phase 3 (Coverage expansion) +- [ ] `scanner.test.ts`: 5+ scenarios (empty, no relations, circular FK, enums, large schema) +- [ ] `route-scanner.test.ts`: 5+ scenarios (PATCH/DELETE, no-auth, nested groups, malformed) +- [ ] `graphql-type-map.test.ts`: imports from source, not duplicated +- [ ] `iac-commands.test.ts`: imports and exercises source functions +- [ ] Output snapshot tests: 5+ commands with golden file comparison +- [ ] `UPDATE_SNAPSHOTS=true` regenerates all golden files in one pass + +### Phase 4 (Deep integration) +- [ ] Dev server integration: start → HTTP 200 → file change → restart → SIGTERM cleanup +- [ ] IAC workflow: sync detects changes → generate produces api.d.ts → analyze reports complexity +- [ ] `spinner.test.ts`: withSpinner success and failure paths +- [ ] Binary smoke: `bb --version`, `bb --help`, `bb init --help` exit codes +- [ ] Cross-product: migrate → graphql regenerate → context regenerate chain + +### Non-acceptance criteria +- [ ] No `expect(true).toBe(true)` in any test file +- [ ] No locally duplicated logic (import from source, don't copy-paste) +- [ ] No test that depends on network access (all fetch calls mocked) +- [ ] No test that depends on a running server process outside the test's control +- [ ] Every test file cleans up temp directories and database connections in `afterAll`/`finally` + +--- + +## 11) Dependencies between phases + +``` +Phase 1 (Foundation) + ├─ fixtures.ts ──────────────► Everything + ├─ database.ts ──────────────► 
Phase 2: webhook, rls-test + │ ► Phase 4: migrate integration + ├─ credentials.test.ts ──────► Phase 2: login-commands + ├─ api-client.test.ts ──────► Phase 2: login-commands, all authenticated tests + ├─ config.test.ts ───────────► Phase 2: branch, webhook, storage + └─ cli-parsing.test.ts ──────► Phase 2-4: all command tests + +Phase 2 (Stub replacement) + ├─ webhook-commands ─────────► Phase 3: output snapshots + ├─ branch-commands ──────────► Phase 3: output snapshots + ├─ dev.test.ts ──────────────► Phase 4: dev server integration + └─ rls-test ─────────────────► Phase 3: PostgreSQL integration + +Phase 3 (Coverage expansion) + ├─ output snapshots ─────────► Phase 4: e2e visual regression + └─ iac-commands ─────────────► Phase 4: IAC workflow + +Phase 4 (Deep integration) + └─ (leaf phase — final hardening) +``` + +--- + +## 12) Risk register + +| Risk | Severity | Mitigation | +|------|----------|------------| +| PostgreSQL required for RLS test tests | Medium | Use `pg-mem` or Docker container; skip if PG unavailable | +| Dev server tests flaky (port binding, timing) | Medium | Use random ports, retry logic, generous timeouts | +| `execSync("bunx drizzle-kit push")` in auth tests blocks | Low | Catch failure gracefully (already done); mock if needed | +| Snapshot files drift from code changes | Low | `UPDATE_SNAPSHOTS=true` regenerates; CI asserts clean | +| Mocking `import()` for config loading is fragile | Medium | Use real temp files with `betterbase.config.ts`; avoid module mocking | +| Bun.spawn for e2e binary tests slow in CI | Low | Keep scope small (version, help, init --help only) | + +--- + +## 13) Estimated effort summary + +| Phase | New Files | Rewrites | Expansions | Est. 
Person-Weeks | +|-------|-----------|----------|------------|-------------------| +| Phase 1 (Foundation) | 7 | 1 | 0 | 2 | +| Phase 2 (Stub replacement) | 0 | 8 | 0 | 3 | +| Phase 3 (Coverage expansion) | 2 | 0 | 7 | 2 | +| Phase 4 (Deep integration) | 2 | 0 | 0 | 2 | +| **Total** | **11** | **9** | **7** | **9** | + +--- + +## 14) Related documentation + +- [CLI Overview](../cli/overview.md) — Full command reference +- [API Reference — CLI Commands](../api-reference/cli-commands.md) — API-level details +- [Core Hardening Review v3](../core/hardening-review-v3.md) — Security/reliability baseline +- [Configuration](../core/config.md) — Config schema and validation +- [Migration Guide](../core/migration.md) — Database migration patterns diff --git a/packages/cli/src/commands/auth.ts b/packages/cli/src/commands/auth.ts index 52be6cb..94feaea 100644 --- a/packages/cli/src/commands/auth.ts +++ b/packages/cli/src/commands/auth.ts @@ -449,7 +449,7 @@ export async function runAuthAddProviderCommand( : `# ${template.displayName} OAuth\n${envVarsToAdd.join("\n")}\n`; writeFileSync(envFile, newEnv, "utf-8"); - logger.success(`✅ Added env vars to .env`); + logger.success("\u2705 Added env vars to .env"); } // Print setup instructions @@ -459,7 +459,7 @@ export async function runAuthAddProviderCommand( authUrl, ); - console.log("\n" + "=".repeat(60)); + console.log(`\n${"=".repeat(60)}`); console.log(`${template.displayName} OAuth Setup Instructions:`); console.log(instructions); console.log("=".repeat(60)); diff --git a/packages/cli/src/commands/branch.ts b/packages/cli/src/commands/branch.ts index c9b05a1..6aa24b6 100644 --- a/packages/cli/src/commands/branch.ts +++ b/packages/cli/src/commands/branch.ts @@ -5,9 +5,6 @@ * Provides commands to create, list, delete, sleep, and wake preview environments. 
*/ -import { readFile } from "node:fs/promises"; -import { resolve } from "node:path"; -import type { BetterBaseConfig } from "@betterbase/core"; import { type BranchConfig, type BranchListResult, @@ -18,29 +15,10 @@ import { getAllBranches, } from "@betterbase/core/branching"; import { CONFIG_FILE_NAME } from "@betterbase/shared"; +import chalk from "chalk"; import * as logger from "../utils/logger"; +import { loadConfig } from "../utils/config"; -/** - * Load BetterBase configuration from project root - * @param projectRoot - Path to the project root - * @returns BetterBase configuration - */ -async function loadConfig(projectRoot: string): Promise { - const configPath = resolve(projectRoot, CONFIG_FILE_NAME); - try { - const configContent = await readFile(configPath, "utf-8"); - // Extract the config object from the file - const configModule = await import(configPath); - return configModule.default || configModule.config || null; - } catch { - return null; - } -} - -/** - * Run the branch create command - * @param args - Command arguments [name, projectRoot] - */ export async function runBranchCreateCommand( args: string[], projectRoot: string = process.cwd(), @@ -48,122 +26,87 @@ export async function runBranchCreateCommand( const name = args[0]; if (!name) { - logger.error("Branch name is required. Usage: bb branch create "); - process.exit(1); + throw new Error("Branch name is required. Usage: bb branch create "); } - logger.info(`Creating preview environment: ${name}`); + logger.info(`Creating preview environment: ${chalk.cyan(name)}`); - try { - // Load configuration - const config = await loadConfig(projectRoot); - if (!config) { - logger.error( - `Could not load configuration from ${CONFIG_FILE_NAME}. Make sure you're in a BetterBase project directory.`, - ); - process.exit(1); - } + const config = await loadConfig(projectRoot); + if (!config) { + throw new Error( + `Could not load configuration from ${CONFIG_FILE_NAME}. 
Make sure you're in a BetterBase project directory.`, + ); + } - // Create branch manager - const branchManager = createBranchManager(config); + const branchManager = createBranchManager(config); - // Create branch options - const options: CreateBranchOptions = { - name, - sourceBranch: "main", - copyDatabase: true, - copyStorage: true, - }; + const options: CreateBranchOptions = { + name, + sourceBranch: "main", + copyDatabase: true, + copyStorage: true, + }; - // Create the branch - const result = await branchManager.createBranch(options); + const result = await branchManager.createBranch(options); - if (!result.success) { - logger.error(`Failed to create preview environment: ${result.error}`); - process.exit(1); - } + if (!result.success) { + throw new Error(`Failed to create preview environment: ${result.error}`); + } - const branch = result.branch!; - logger.success("Preview environment created successfully!"); - logger.info(` Name: ${branch.name}`); - logger.info(` Preview URL: ${branch.previewUrl}`); - logger.info(` Status: ${branch.status}`); - - if (result.warnings && result.warnings.length > 0) { - logger.warn("Warnings:"); - for (const warning of result.warnings) { - logger.warn(` - ${warning}`); - } - } + const branch = result.branch!; + logger.section("Preview environment created"); + logger.keyValue("Name", branch.name); + logger.keyValue("Preview URL", branch.previewUrl); + logger.keyValue("Status", branch.status); - if (branch.databaseConnectionString) { - logger.info(" Database: Cloned from main"); + if (result.warnings && result.warnings.length > 0) { + logger.warn("Warnings:"); + for (const warning of result.warnings) { + logger.warn(` - ${warning}`); } + } - if (branch.storageBucket) { - logger.info(` Storage: ${branch.storageBucket}`); - } - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - logger.error(`Error creating preview environment: ${message}`); - process.exit(1); + if (branch.databaseConnectionString) { + logger.info("Database: Cloned from main"); + } + + if (branch.storageBucket) { + logger.keyValue("Storage", branch.storageBucket); } } -/** - * Run the branch list command - * @param args - Command arguments - * @param projectRoot - Path to the project root - */ export async function runBranchListCommand( args: string[] = [], projectRoot: string = process.cwd(), ): Promise { - try { - // Load configuration - const config = await loadConfig(projectRoot); - if (!config) { - logger.error( - `Could not load configuration from ${CONFIG_FILE_NAME}. Make sure you're in a BetterBase project directory.`, - ); - process.exit(1); - } - - // Create branch manager - const branchManager = createBranchManager(config); + const config = await loadConfig(projectRoot); + if (!config) { + throw new Error( + `Could not load configuration from ${CONFIG_FILE_NAME}. 
Make sure you're in a BetterBase project directory.`, + ); + } - // List all branches - const result = branchManager.listBranches(); + const branchManager = createBranchManager(config); + const result = branchManager.listBranches(); - if (result.branches.length === 0) { - logger.info("No preview environments found."); - logger.info("Run 'bb branch create ' to create one."); - return; - } + if (result.branches.length === 0) { + logger.info("No preview environments found."); + logger.info("Run 'bb branch create ' to create one."); + return; + } - logger.info(`Found ${result.total} preview environment(s):\n`); + logger.section(`Preview Environments (${result.total})`); - // Display each branch - for (const branch of result.branches) { - logger.info(` ${branch.name}`); - logger.info(` Status: ${branch.status}`); - logger.info(` URL: ${branch.previewUrl}`); - logger.info(` Created: ${branch.createdAt.toISOString()}`); - logger.info(` Last accessed: ${branch.lastAccessedAt.toISOString()}`); - logger.info(""); - } - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - logger.error(`Error listing preview environments: ${message}`); - process.exit(1); + for (const branch of result.branches) { + const statusColor = branch.status === "active" ? chalk.green : branch.status === "sleeping" ? 
chalk.yellow : chalk.dim; + console.log(` ${chalk.bold(branch.name)} ${statusColor(`(${branch.status})`)}`); + console.log(chalk.dim(` URL: ${branch.previewUrl}`)); + console.log(chalk.dim(` Created: ${branch.createdAt.toISOString().split("T")[0]}`)); + console.log(chalk.dim(` Last: ${branch.lastAccessedAt.toISOString().split("T")[0]}`)); + console.log(""); } } -/** - * Run the branch delete command - * @param args - Command arguments [name] - * @param projectRoot - Path to the project root - */ export async function runBranchDeleteCommand( args: string[], projectRoot: string = process.cwd(), @@ -171,60 +114,41 @@ export async function runBranchDeleteCommand( const name = args[0]; if (!name) { - logger.error("Branch name is required. Usage: bb branch delete "); - process.exit(1); + throw new Error("Branch name is required. Usage: bb branch delete "); } - logger.info(`Deleting preview environment: ${name}`); + logger.info(`Deleting preview environment: ${chalk.cyan(name)}`); - try { - // Load configuration - const config = await loadConfig(projectRoot); - if (!config) { - logger.error( - `Could not load configuration from ${CONFIG_FILE_NAME}. Make sure you're in a BetterBase project directory.`, - ); - process.exit(1); - } + const config = await loadConfig(projectRoot); + if (!config) { + throw new Error( + `Could not load configuration from ${CONFIG_FILE_NAME}. 
Make sure you're in a BetterBase project directory.`, + ); + } - // Create branch manager - const branchManager = createBranchManager(config); + const branchManager = createBranchManager(config); + const branch = branchManager.getBranchByName(name); - // Find branch by name - const branch = branchManager.getBranchByName(name); - if (!branch) { - logger.error(`Preview environment '${name}' not found.`); - process.exit(1); - } + if (!branch) { + throw new Error(`Preview environment '${name}' not found.`); + } - // Delete the branch - const result = await branchManager.deleteBranch(branch.id); + const result = await branchManager.deleteBranch(branch.id); - if (!result.success) { - logger.error(`Failed to delete preview environment: ${result.error}`); - process.exit(1); - } + if (!result.success) { + throw new Error(`Failed to delete preview environment: ${result.error}`); + } - logger.success(`Preview environment '${name}' deleted successfully!`); + logger.success(`Preview environment '${chalk.cyan(name)}' deleted.`); - if (result.warnings && result.warnings.length > 0) { - logger.warn("Warnings:"); - for (const warning of result.warnings) { - logger.warn(` - ${warning}`); - } + if (result.warnings && result.warnings.length > 0) { + logger.warn("Warnings:"); + for (const warning of result.warnings) { + logger.warn(` - ${warning}`); } - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - logger.error(`Error deleting preview environment: ${message}`); - process.exit(1); } } -/** - * Run the branch sleep command - * @param args - Command arguments [name] - * @param projectRoot - Path to the project root - */ export async function runBranchSleepCommand( args: string[], projectRoot: string = process.cwd(), @@ -232,54 +156,35 @@ export async function runBranchSleepCommand( const name = args[0]; if (!name) { - logger.error("Branch name is required. 
Usage: bb branch sleep "); - process.exit(1); + throw new Error("Branch name is required. Usage: bb branch sleep "); } - logger.info(`Putting preview environment to sleep: ${name}`); - - try { - // Load configuration - const config = await loadConfig(projectRoot); - if (!config) { - logger.error( - `Could not load configuration from ${CONFIG_FILE_NAME}. Make sure you're in a BetterBase project directory.`, - ); - process.exit(1); - } + logger.info(`Putting preview environment to sleep: ${chalk.cyan(name)}`); - // Create branch manager - const branchManager = createBranchManager(config); + const config = await loadConfig(projectRoot); + if (!config) { + throw new Error( + `Could not load configuration from ${CONFIG_FILE_NAME}. Make sure you're in a BetterBase project directory.`, + ); + } - // Find branch by name - const branch = branchManager.getBranchByName(name); - if (!branch) { - logger.error(`Preview environment '${name}' not found.`); - process.exit(1); - } + const branchManager = createBranchManager(config); + const branch = branchManager.getBranchByName(name); - // Sleep the branch - const result = await branchManager.sleepBranch(branch.id); + if (!branch) { + throw new Error(`Preview environment '${name}' not found.`); + } - if (!result.success) { - logger.error(`Failed to sleep preview environment: ${result.error}`); - process.exit(1); - } + const result = await branchManager.sleepBranch(branch.id); - logger.success(`Preview environment '${name}' is now sleeping!`); - logger.info("You can wake it up later with 'bb branch wake '"); - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - logger.error(`Error putting preview environment to sleep: ${message}`); - process.exit(1); + if (!result.success) { + throw new Error(`Failed to sleep preview environment: ${result.error}`); } + + logger.success(`Preview environment '${chalk.cyan(name)}' is now sleeping.`); + logger.info("Wake it up later with 'bb branch wake '"); } -/** - * Run the branch wake command - * @param args - Command arguments [name] - * @param projectRoot - Path to the project root - */ export async function runBranchWakeCommand( args: string[], projectRoot: string = process.cwd(), @@ -287,100 +192,82 @@ export async function runBranchWakeCommand( const name = args[0]; if (!name) { - logger.error("Branch name is required. Usage: bb branch wake "); - process.exit(1); + throw new Error("Branch name is required. Usage: bb branch wake "); } - logger.info(`Waking preview environment: ${name}`); - - try { - // Load configuration - const config = await loadConfig(projectRoot); - if (!config) { - logger.error( - `Could not load configuration from ${CONFIG_FILE_NAME}. Make sure you're in a BetterBase project directory.`, - ); - process.exit(1); - } + logger.info(`Waking preview environment: ${chalk.cyan(name)}`); - // Create branch manager - const branchManager = createBranchManager(config); + const config = await loadConfig(projectRoot); + if (!config) { + throw new Error( + `Could not load configuration from ${CONFIG_FILE_NAME}. 
Make sure you're in a BetterBase project directory.`, + ); + } - // Find branch by name - const branch = branchManager.getBranchByName(name); - if (!branch) { - logger.error(`Preview environment '${name}' not found.`); - process.exit(1); - } + const branchManager = createBranchManager(config); + const branch = branchManager.getBranchByName(name); - // Wake the branch - const result = await branchManager.wakeBranch(branch.id); + if (!branch) { + throw new Error(`Preview environment '${name}' not found.`); + } - if (!result.success) { - logger.error(`Failed to wake preview environment: ${result.error}`); - process.exit(1); - } + const result = await branchManager.wakeBranch(branch.id); - logger.success(`Preview environment '${name}' is now active!`); - logger.info(`Preview URL: ${branch.previewUrl}`); - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - logger.error(`Error waking preview environment: ${message}`); - process.exit(1); + if (!result.success) { + throw new Error(`Failed to wake preview environment: ${result.error}`); } + + logger.success(`Preview environment '${chalk.cyan(name)}' is now active!`); + logger.keyValue("Preview URL", branch.previewUrl); } -/** - * Run the branch command (main dispatcher) - * @param args - Command arguments - * @param projectRoot - Path to the project root - */ export async function runBranchCommand( args: string[] = [], projectRoot: string = process.cwd(), ): Promise { const action = args[0]; - switch (action) { - case "create": - await runBranchCreateCommand(args.slice(1), projectRoot); - break; - case "list": - case "ls": - await runBranchListCommand(args.slice(1), projectRoot); - break; - case "delete": - case "remove": - case "rm": - await runBranchDeleteCommand(args.slice(1), projectRoot); - break; - case "sleep": - await runBranchSleepCommand(args.slice(1), projectRoot); - break; - case "wake": - await runBranchWakeCommand(args.slice(1), projectRoot); - break; - case 
undefined: - // No action specified, show help - logger.info("Usage: bb branch [options]"); - logger.info(""); - logger.info("Commands:"); - logger.info(" create Create a new preview environment"); - logger.info(" list List all preview environments"); - logger.info(" delete Delete a preview environment"); - logger.info(" sleep Put a preview environment to sleep"); - logger.info(" wake Wake a sleeping preview environment"); - logger.info(""); - logger.info("Examples:"); - logger.info(" bb branch create my-feature"); - logger.info(" bb branch list"); - logger.info(" bb branch delete my-feature"); - logger.info(" bb branch sleep my-feature"); - logger.info(" bb branch wake my-feature"); - break; - default: - logger.error(`Unknown branch command: ${action}`); - logger.info("Run 'bb branch' for usage information."); - process.exit(1); + try { + switch (action) { + case "create": + await runBranchCreateCommand(args.slice(1), projectRoot); + break; + case "list": + case "ls": + await runBranchListCommand(args.slice(1), projectRoot); + break; + case "delete": + case "remove": + case "rm": + await runBranchDeleteCommand(args.slice(1), projectRoot); + break; + case "sleep": + await runBranchSleepCommand(args.slice(1), projectRoot); + break; + case "wake": + await runBranchWakeCommand(args.slice(1), projectRoot); + break; + case undefined: + logger.info("Usage: bb branch [options]"); + logger.blank(); + logger.info("Commands:"); + logger.info(" create Create a new preview environment"); + logger.info(" list List all preview environments"); + logger.info(" delete Delete a preview environment"); + logger.info(" sleep Put a preview environment to sleep"); + logger.info(" wake Wake a sleeping preview environment"); + logger.blank(); + logger.info("Examples:"); + logger.info(" bb branch create my-feature"); + logger.info(" bb branch list"); + logger.info(" bb branch delete my-feature"); + break; + default: + throw new Error(`Unknown branch command: ${action}`); + } + } catch 
(error) { + const message = error instanceof Error ? error.message : String(error); + logger.error(message); + throw error; } } diff --git a/packages/cli/src/commands/generate.ts b/packages/cli/src/commands/generate.ts index f7e51a8..374e76d 100644 --- a/packages/cli/src/commands/generate.ts +++ b/packages/cli/src/commands/generate.ts @@ -2,7 +2,7 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import path from "node:path"; import chalk from "chalk"; import * as logger from "../utils/logger"; -import { SchemaScanner, type TableInfo } from "../utils/schema-scanner"; +import { SchemaScanner, type TableInfo } from "../utils/scanner"; import { withSpinner } from "../utils/spinner"; import { runGenerateGraphqlCommand } from "./graphql"; diff --git a/packages/cli/src/commands/graphql.ts b/packages/cli/src/commands/graphql.ts index b75157c..acaed88 100644 --- a/packages/cli/src/commands/graphql.ts +++ b/packages/cli/src/commands/graphql.ts @@ -7,7 +7,7 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import path from "node:path"; import * as logger from "../utils/logger"; -import { SchemaScanner } from "../utils/schema-scanner"; +import { SchemaScanner } from "../utils/scanner"; /** * Type for Drizzle table objects - using a generic approach to avoid type issues diff --git a/packages/cli/src/commands/login.ts b/packages/cli/src/commands/login.ts index b1b845c..1f23e4c 100644 --- a/packages/cli/src/commands/login.ts +++ b/packages/cli/src/commands/login.ts @@ -1,36 +1,11 @@ import chalk from "chalk"; -import type { Command } from "commander"; import { clearCredentials, loadCredentials, saveCredentials } from "../utils/credentials"; import { blank, box, error, keyValue, section, success, sym } from "../utils/logger"; import { createSpinner } from "../utils/spinner"; const DEFAULT_SERVER_URL = "https://api.betterbase.io"; const POLL_INTERVAL_MS = 5000; -const POLL_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes - 
-export function registerLoginCommand(program: Command) { - program - .command("login") - .description("Authenticate with a Betterbase instance") - .option("--url ", "Self-hosted Betterbase server URL", DEFAULT_SERVER_URL) - .option("--email ", "Admin email (for API key login)") - .option("--password ", "Admin password (for API key login)") - .action(async (opts) => { - if (opts.email && opts.password) { - await runApiKeyLogin({ serverUrl: opts.url, email: opts.email, password: opts.password }); - } else { - await runLoginCommand({ serverUrl: opts.url }); - } - }); - - program - .command("logout") - .description("Clear stored credentials") - .action(() => { - clearCredentials(); - success("Logged out."); - }); -} +const POLL_TIMEOUT_MS = 5 * 60 * 1000; export async function runLoginCommand(opts: { serverUrl?: string } = {}) { const serverUrl = (opts.serverUrl ?? DEFAULT_SERVER_URL).replace(/\/$/, ""); @@ -181,11 +156,6 @@ export async function runApiKeyLogin(opts: { } } -// Legacy exports for compatibility -export async function runLoginCommandLegacy(): Promise { - await runLoginCommand({}); -} - export async function runLogoutCommand(): Promise { clearCredentials(); success("Logged out."); diff --git a/packages/cli/src/commands/migrate.ts b/packages/cli/src/commands/migrate.ts index 83d6d8e..03c70c4 100644 --- a/packages/cli/src/commands/migrate.ts +++ b/packages/cli/src/commands/migrate.ts @@ -19,6 +19,7 @@ import { const migrateOptionsSchema = z.object({ preview: z.boolean().optional(), production: z.boolean().optional(), + projectRoot: z.string().optional(), }); export type MigrateCommandOptions = z.infer; @@ -52,16 +53,84 @@ interface MigrationBackup { const DRIZZLE_DIR = "drizzle"; const DRIZZLE_TIMEOUT_MS = 30_000; +export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { + const startTime = Date.now(); + const options = migrateOptionsSchema.parse(rawOptions); + const projectRoot = options.projectRoot ?? 
process.cwd(); + + const changes = await withSpinner( + "Generating migration files...", + async () => await collectChangesFromGenerate(projectRoot), + { successText: "Migration files generated" }, + ); + displayDiff(changes); + + if (options.preview) { + logger.info("Preview mode enabled. No migrations applied."); + return; + } + + if (options.production) { + const proceed = await prompts.confirm({ + message: "Apply migrations to production now?", + initial: false, + }); + if (!proceed) { + logger.warn("Migration cancelled by user."); + return; + } + } + + let backup: MigrationBackup | null = null; + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(projectRoot); + const confirmed = await confirmDestructive(changes); + if (!confirmed) return; + } + + logger.info("drizzle/ files are for preview; running push will apply changes."); + const push = await withSpinner( + "Applying migration changes...", + async () => await runDrizzleKit(["push"], projectRoot), + { successText: "Applied migration changes" }, + ); + + if (!push.success) { + await restoreBackup(backup); + + if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error( + `Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`, + ); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.done(startTime, "Migration complete"); + + logger.info("Regenerating GraphQL schema..."); + try { + await runGenerateGraphqlCommand(projectRoot); + } catch (err) { + logger.warn(`Failed to regenerate GraphQL: ${(err as Error).message}`); + } +} + function captureIdentifier(match: RegExpMatchArray, startIndex: number): string { return match[startIndex] ?? match[startIndex + 1] ?? match[startIndex + 2] ?? 
""; } -async function runDrizzleKit(args: string[]): Promise { +async function runDrizzleKit(args: string[], cwd: string = process.cwd()): Promise { const controller = new AbortController(); const timeout = setTimeout(() => controller.abort(), DRIZZLE_TIMEOUT_MS); const proc = Bun.spawn(["bunx", "drizzle-kit", ...args], { - cwd: process.cwd(), + cwd, stdout: "pipe", stderr: "pipe", signal: controller.signal, @@ -86,9 +155,9 @@ async function runDrizzleKit(args: string[]): Promise { } } -async function listSqlFiles(baseDir: string): Promise> { +async function listSqlFiles(baseDir: string, cwd: string = process.cwd()): Promise> { const entries = new Map(); - const root = path.join(process.cwd(), baseDir); + const root = path.join(cwd, baseDir); const walk = async (dir: string): Promise => { try { @@ -206,10 +275,10 @@ export function analyzeMigration(sqlStatements: string[]): MigrationChange[] { } function displayDiff(changes: MigrationChange[]): void { - console.log("\n📊 Migration Preview\n"); + logger.section("Migration Preview"); if (changes.length === 0) { - console.log(chalk.gray("No schema changes detected.")); + logger.dim("No schema changes detected."); return; } @@ -219,38 +288,38 @@ function displayDiff(changes: MigrationChange[]): void { const destructive = changes.filter((c) => c.isDestructive); if (newTables.length) { - console.log(chalk.green("✅ New Tables:")); + console.log(chalk.green.bold("New Tables:")); for (const change of newTables) { console.log(chalk.green(` + ${change.table}`)); } - console.log(""); + logger.blank(); } if (newColumns.length) { - console.log(chalk.green("✅ New Columns:")); + console.log(chalk.green.bold("New Columns:")); for (const change of newColumns) { console.log(chalk.green(` + ${change.table}.${change.column ?? 
""}`)); } - console.log(""); + logger.blank(); } if (modified.length) { - console.log(chalk.yellow("⚠️ Modified Columns:")); + console.log(chalk.yellow.bold("Modified Columns:")); for (const change of modified) { - console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ""}`)); + console.log(chalk.yellow(` ~ ${change.table}.${change.column ?? ""}`)); } - console.log(""); + logger.blank(); } if (destructive.length) { - console.log(chalk.red("❌ Destructive Changes:")); + console.log(chalk.red.bold("Destructive Changes:")); for (const change of destructive) { console.log( chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ""}`), ); - console.log(chalk.red(" ⚠️ This will DELETE DATA")); + console.log(chalk.red(` ${logger.sym.warn} This will DELETE DATA`)); } - console.log(""); + logger.blank(); } } @@ -278,7 +347,7 @@ async function confirmDestructive(changes: MigrationChange[]): Promise return true; } -async function backupDatabase(): Promise { +async function backupDatabase(projectRoot: string = process.cwd()): Promise { const sourcePath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; try { @@ -289,7 +358,7 @@ async function backupDatabase(): Promise { } const timestamp = new Date().toISOString().replace(/:/g, "-"); - const backupDir = path.join(process.cwd(), "backups"); + const backupDir = path.join(projectRoot, "backups"); await mkdir(backupDir, { recursive: true }); const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); @@ -410,9 +479,9 @@ export function splitStatements(sql: string): string[] { return statements; } -async function collectChangesFromGenerate(): Promise { - const before = await listSqlFiles(DRIZZLE_DIR); - const generate = await runDrizzleKit(["generate"]); +async function collectChangesFromGenerate(projectRoot: string): Promise { + const before = await listSqlFiles(DRIZZLE_DIR, projectRoot); + const generate = await runDrizzleKit(["generate"], projectRoot); if (!generate.success) { if (/conflict|merge/i.test(generate.stderr)) { @@ -424,7 +493,7 @@ async function collectChangesFromGenerate(): Promise { throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); } - const after = await listSqlFiles(DRIZZLE_DIR); + const after = await listSqlFiles(DRIZZLE_DIR, projectRoot); const changedSql: string[] = []; for (const [relativePath, content] of after.entries()) { @@ -439,80 +508,6 @@ async function collectChangesFromGenerate(): Promise { return analyzeMigration(changedSql); } -export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { - const startTime = Date.now(); - const options = migrateOptionsSchema.parse(rawOptions); - - const changes = await withSpinner( - "Generating migration files...", - async () => await collectChangesFromGenerate(), - { successText: "Migration files generated" }, - ); - displayDiff(changes); - - if (options.preview) { - logger.info("Preview mode enabled. 
No migrations applied."); - return; - } - - if (options.production) { - const proceed = await prompts.confirm({ - message: "Apply migrations to production now?", - initial: false, - }); - if (!proceed) { - logger.warn("Migration cancelled by user."); - return; - } - } - - let backup: MigrationBackup | null = null; - if (changes.some((change) => change.isDestructive)) { - backup = await backupDatabase(); - const confirmed = await confirmDestructive(changes); - if (!confirmed) return; - } - - logger.info("drizzle/ files are for preview; running push will apply changes."); - const push = await withSpinner( - "Applying migration changes...", - async () => await runDrizzleKit(["push"]), - { successText: "Applied migration changes" }, - ); - - if (!push.success) { - await restoreBackup(backup); - - if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { - throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); - } - - if (/conflict|merge/i.test(push.stderr)) { - throw new Error( - `Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`, - ); - } - - throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); - } - - logger.done(startTime, "Migration complete"); - - // Regenerate GraphQL schema after migration - // Use the directory where the migration was run (current working directory) - logger.info("Regenerating GraphQL schema..."); - try { - const projectRoot = process.cwd(); - await runGenerateGraphqlCommand(projectRoot); - } catch (err) { - logger.warn(`Failed to regenerate GraphQL: ${(err as Error).message}`); - } -} - -/** - * Get database connection based on environment - * Supports both SQLite (local) and PostgreSQL (remote) - */ async function getDatabaseConnection(): Promise { const dbPath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; @@ -606,30 +601,21 @@ export async function runMigrateRollbackCommand( logger.info(`Rolling back last ${steps} migration(s)...`); - // Change to project directory - const originalCwd = process.cwd(); - if (projectRoot !== originalCwd) { - process.chdir(projectRoot); - } - let db: Database; try { db = await getDatabaseConnection(); } catch (err) { - logger.error(`Failed to connect to database: ${(err as Error).message}`); - process.chdir(originalCwd); - process.exit(1); + const message = err instanceof Error ? err.message : String(err); + throw new Error(`Failed to connect to database: ${message}`); } const migrationsDir = path.join(projectRoot, "migrations"); - // Check if migrations directory exists try { await access(migrationsDir); } catch { logger.warn(`Migrations directory not found at ${migrationsDir}`); logger.info("Create a 'migrations' folder with your migration files"); - process.chdir(originalCwd); return; } @@ -637,18 +623,14 @@ export async function runMigrateRollbackCommand( try { allMigrations = await loadMigrationFiles(migrationsDir); } catch (err) { - logger.error(`Failed to load migrations: ${(err as Error).message}`); - if (typeof db.close === "function") db.close(); - process.chdir(originalCwd); - process.exit(1); + const message = err instanceof Error ? 
err.message : String(err); + throw new Error(`Failed to load migrations: ${message}`); } const applied = await getAppliedMigrations(db); if (applied.length === 0) { logger.warn("No migrations to rollback"); - if (typeof db.close === "function") db.close(); - process.chdir(originalCwd); return; } @@ -662,17 +644,14 @@ export async function runMigrateRollbackCommand( const migration = allMigrations.find((m) => m.name === lastMigration.name); if (!migration?.downSql) { - logger.error(`Migration ${lastMigration.name} has no down.sql file`); - logger.info(`Create ${lastMigration.name}_down.sql to enable rollback`); - if (typeof db.close === "function") db.close(); - process.chdir(originalCwd); - process.exit(1); + throw new Error( + `Migration ${lastMigration.name} has no down.sql file. Create ${lastMigration.name}_down.sql to enable rollback.`, + ); } logger.info(`Rolling back: ${migration.name}`); try { - // Execute the down SQL const statements = splitStatements(migration.downSql); for (const stmt of statements) { if (stmt.trim()) { @@ -680,23 +659,17 @@ export async function runMigrateRollbackCommand( } } - // Remove from tracking table - removeMigration(db, migration.name); + await removeMigration(db, migration.name); - logger.success(`✅ Rolled back: ${migration.name}`); + logger.success(`Rolled back: ${migration.name}`); rolledBack++; } catch (err) { - logger.error(`Failed to rollback: ${(err as Error).message}`); - if (typeof db.close === "function") db.close(); - process.chdir(originalCwd); - process.exit(1); + const message = err instanceof Error ? 
err.message : String(err); + throw new Error(`Failed to rollback ${migration.name}: ${message}`); } } - logger.success(`✅ Rolled back ${rolledBack} migration(s)`); - - if (typeof db.close === "function") db.close(); - process.chdir(originalCwd); + logger.success(`Rolled back ${rolledBack} migration(s)`); } /** @@ -704,26 +677,16 @@ export async function runMigrateRollbackCommand( * Displays all applied migrations */ export async function runMigrateHistoryCommand(projectRoot: string): Promise { - // Change to project directory - const originalCwd = process.cwd(); - if (projectRoot !== originalCwd) { - process.chdir(projectRoot); - } - let db: Database; try { db = await getDatabaseConnection(); } catch (err) { - logger.error(`Failed to connect to database: ${(err as Error).message}`); - process.chdir(originalCwd); - process.exit(1); + const message = err instanceof Error ? err.message : String(err); + throw new Error(`Failed to connect to database: ${message}`); } const applied = await getAppliedMigrations(db); - if (typeof db.close === "function") db.close(); - process.chdir(originalCwd); - if (applied.length === 0) { logger.info("No migrations applied"); return; diff --git a/packages/cli/src/commands/rls-test.ts b/packages/cli/src/commands/rls-test.ts index 5b48fc5..68613b8 100644 --- a/packages/cli/src/commands/rls-test.ts +++ b/packages/cli/src/commands/rls-test.ts @@ -239,8 +239,7 @@ export async function runRLSTestCommand(projectRoot: string, tableName: string): // Check database type const dbType = getDatabaseType(); if (dbType !== "postgresql") { - logger.error(`RLS testing is only supported for PostgreSQL databases. Current: ${dbType}`); - process.exit(1); + throw new Error(`RLS testing is only supported for PostgreSQL databases. 
Current: ${dbType}`); } // Get database connection @@ -257,8 +256,7 @@ export async function runRLSTestCommand(projectRoot: string, tableName: string): `; if (tableCheck.length === 0) { - logger.error(`Table "${tableName}" not found in public schema`); - process.exit(1); + throw new Error(`Table "${tableName}" not found in public schema`); } // Check if RLS is enabled on the source table @@ -496,7 +494,8 @@ export async function runRLSTestCommand(projectRoot: string, tableName: string): // Exit with error code if any tests failed if (failedCount > 0) { - process.exit(1); + logger.error(`RLS tests: ${passedCount} passed, ${failedCount} failed`); + throw new Error(`${failedCount} RLS test(s) failed`); } } finally { // Cleanup: Drop test schema diff --git a/packages/cli/src/commands/rls.ts b/packages/cli/src/commands/rls.ts index 4d30723..ba4ac6f 100644 --- a/packages/cli/src/commands/rls.ts +++ b/packages/cli/src/commands/rls.ts @@ -93,11 +93,9 @@ export default definePolicy('${table}', { */ export async function runRlsCreate(table: string): Promise { if (!table) { - logger.error("Table name is required. Usage: bb rls create "); - process.exit(1); + throw new Error("Table name is required. Usage: bb rls create
"); } - // Sanitize table name const sanitizedTable = table.replace(/[^a-zA-Z0-9_]/g, "_"); const projectRoot = process.cwd(); @@ -129,23 +127,22 @@ export async function runRlsList(): Promise { const policyFiles = findPolicyFiles(projectRoot); if (policyFiles.length === 0) { - console.log(chalk.yellow("No RLS policies found.")); - console.log(chalk.gray("Create one with: bb rls create
\n")); + logger.warn("No RLS policies found."); + logger.info("Create one with: bb rls create
\n"); return; } - console.log(chalk.bold("\n📋 RLS Policies\n")); + logger.section("RLS Policies"); - // Display in table format - console.log(chalk.gray(`${"Table".padEnd(20)}File`)); - console.log(chalk.gray("-".repeat(50))); + console.log(chalk.dim(`${"Table".padEnd(20)}File`)); + console.log(chalk.dim("-".repeat(50))); for (const file of policyFiles) { const table = file.replace(".policy.ts", ""); console.log(table.padEnd(20) + file); } - console.log(chalk.gray(`\nTotal: ${policyFiles.length} policy file(s)\n`)); + console.log(chalk.dim(`\nTotal: ${policyFiles.length} policy file(s)\n`)); } catch (error) { logger.error(`Failed to list policies: ${error}`); } @@ -157,8 +154,7 @@ export async function runRlsList(): Promise { */ export async function runRlsDisable(table: string): Promise { if (!table) { - logger.error("Table name is required. Usage: bb rls disable
"); - process.exit(1); + throw new Error("Table name is required. Usage: bb rls disable
"); } const projectRoot = process.cwd(); diff --git a/packages/cli/src/commands/storage.ts b/packages/cli/src/commands/storage.ts index 4051293..fb2d055 100644 --- a/packages/cli/src/commands/storage.ts +++ b/packages/cli/src/commands/storage.ts @@ -7,7 +7,6 @@ import { existsSync as fsExistsSync, readFileSync as fsReadFileSync } from "node:fs"; import { readFile, writeFile } from "node:fs/promises"; import path from "node:path"; -import { type BetterBaseConfig, parseConfig } from "@betterbase/core/config"; import { type StorageConfig, type StorageObject, @@ -17,6 +16,7 @@ import { } from "@betterbase/core/storage"; import inquirer from "inquirer"; import * as logger from "../utils/logger"; +import { findConfigFile, loadConfig } from "../utils/config"; /** * Supported storage provider types @@ -33,51 +33,6 @@ interface StorageCredentials { /** * Find and load the BetterBase config file */ -async function findConfigFile(projectRoot: string): Promise { - const configPaths = [ - path.join(projectRoot, "betterbase.config.ts"), - path.join(projectRoot, "betterbase.config.js"), - path.join(projectRoot, "betterbase.config.mts"), - ]; - - for (const configPath of configPaths) { - if (fsExistsSync(configPath)) { - return configPath; - } - } - - return null; -} - -/** - * Load and parse the BetterBase config - */ -async function loadConfig(projectRoot: string): Promise { - const configPath = await findConfigFile(projectRoot); - - if (!configPath) { - return null; - } - - try { - // Dynamic import for ESM modules - const configModule = await import(configPath); - const config = configModule.default || configModule; - - if (config && typeof config === "object") { - const parseResult = parseConfig(config); - if (parseResult.success) { - return parseResult.data; - } - } - - return null; - } catch (error) { - logger.warn(`Failed to load config: ${error instanceof Error ? 
error.message : String(error)}`); - return null; - } -} - /** * Get storage config from environment variables */ diff --git a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index 7c35a06..3b00c1e 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -6,16 +6,14 @@ import { existsSync as fsExistsSync, readFileSync, writeFileSync } from "node:fs"; import path from "node:path"; -import { type BetterBaseConfig, parseConfig } from "@betterbase/core/config"; import { type WebhookDeliveryLog, WebhookDispatcher } from "@betterbase/core/webhooks"; import type { DBEventType } from "@betterbase/shared"; +import chalk from "chalk"; import inquirer from "inquirer"; import * as logger from "../utils/logger"; +import { findConfigFile, loadConfig } from "../utils/config"; import { SchemaScanner } from "../utils/scanner"; -/** - * Webhook configuration from config file - */ interface WebhookEntry { id: string; table: string; @@ -25,144 +23,11 @@ interface WebhookEntry { enabled: boolean; } -/** - * Find and load the BetterBase config file - */ -async function findConfigFile(projectRoot: string): Promise { - const configPaths = [ - path.join(projectRoot, "betterbase.config.ts"), - path.join(projectRoot, "betterbase.config.js"), - path.join(projectRoot, "betterbase.config.mts"), - ]; - - for (const configPath of configPaths) { - if (fsExistsSync(configPath)) { - return configPath; - } - } - - return null; -} - -/** - * Load and parse the BetterBase config - */ -async function loadConfig(projectRoot: string): Promise { - const configPath = await findConfigFile(projectRoot); - - if (!configPath) { - logger.error('No betterbase.config.ts found. 
Run "bb init" first.'); - return null; - } - - try { - // Dynamic import for ESM modules - const configModule = await import(configPath); - const config = configModule.default || configModule; - - if (config && typeof config === "object") { - const parseResult = parseConfig(config); - if (parseResult.success) { - return parseResult.data; - } - logger.error(`Config validation failed: ${parseResult.error.message}`); - return null; - } - - return null; - } catch (error) { - logger.error( - `Failed to load config: ${error instanceof Error ? error.message : String(error)}`, - ); - return null; - } -} - -/** - * Find database schema file - */ -function findSchemaFile(projectRoot: string): string | null { - const schemaPaths = [ - path.join(projectRoot, "src/db/schema.ts"), - path.join(projectRoot, "src/database/schema.ts"), - path.join(projectRoot, "schema.ts"), - ]; - - for (const schemaPath of schemaPaths) { - if (fsExistsSync(schemaPath)) { - return schemaPath; - } - } - - return null; -} - -/** - * Get list of tables from schema - */ -function getTablesFromSchema(projectRoot: string): string[] { - const schemaPath = findSchemaFile(projectRoot); - if (!schemaPath) { - return []; - } - - try { - const scanner = new SchemaScanner(schemaPath); - const tables = scanner.scan(); - return Object.keys(tables); - } catch (error) { - logger.warn(`Failed to scan schema: ${error instanceof Error ? 
error.message : String(error)}`); - return []; - } -} - -/** - * Read the raw config file content - */ -async function readConfigFile( - projectRoot: string, -): Promise<{ content: string; path: string } | null> { - const configPath = findConfigFile(projectRoot); - const resolvedPath = await configPath; - if (!resolvedPath) { - return null; - } - - try { - const content = readFileSync(resolvedPath, "utf-8"); - return { content, path: resolvedPath }; - } catch (error) { - return null; - } -} - -/** - * Write updated config file - */ -function writeConfigFile(configPath: string, content: string): boolean { - try { - writeFileSync(configPath, content, "utf-8"); - return true; - } catch (error) { - logger.error( - `Failed to write config: ${error instanceof Error ? error.message : String(error)}`, - ); - return false; - } -} - -/** - * Generate a unique webhook ID - */ function generateWebhookId(): string { return `webhook-${Date.now().toString(36)}`; } -/** - * Run webhook create command - */ export async function runWebhookCreateCommand(projectRoot: string): Promise { - // Load config to check existing webhooks const config = await loadConfig(projectRoot); if (!config) { @@ -170,7 +35,6 @@ export async function runWebhookCreateCommand(projectRoot: string): Promise([ { - type: "list" as const, + type: "list", name: "tableName", message: "Select the table to trigger webhooks:", choices: tables, @@ -189,10 +52,9 @@ export async function runWebhookCreateCommand(projectRoot: string): Promise([ { - type: "checkbox" as const, + type: "checkbox", name: "events", message: "Select events to trigger webhook:", choices: [ @@ -209,10 +71,9 @@ export async function runWebhookCreateCommand(projectRoot: string): Promise([ { - type: "input" as const, + type: "input", name: "urlEnvVar", message: "Enter the environment variable name for the webhook URL:", default: `WEBHOOK_${tableName.toUpperCase()}_URL`, @@ -229,10 +90,9 @@ export async function runWebhookCreateCommand(projectRoot: 
string): Promise([ { - type: "input" as const, + type: "input", name: "secretEnvVar", message: "Enter the environment variable name for the webhook secret:", default: "WEBHOOK_SECRET", @@ -249,7 +109,6 @@ export async function runWebhookCreateCommand(projectRoot: string): Promise { const config = await loadConfig(projectRoot); @@ -349,30 +196,25 @@ export async function runWebhookListCommand(projectRoot: string): Promise return; } - // Print webhook table - console.log("\n\x1b[1mWebhooks\x1b[0m"); - console.log("─".repeat(80)); + logger.section(`Webhooks (${webhooks.length})`); + console.log(chalk.dim("─".repeat(80))); console.log( - `\x1b[1m${"ID".padEnd(20)} ${"Table".padEnd(15)} ${"Events".padEnd(20)} ${"Status".padEnd(10)}\x1b[0m`, + chalk.bold(`${"ID".padEnd(20)} ${"Table".padEnd(15)} ${"Events".padEnd(20)} ${"Status".padEnd(10)}`), ); - console.log("─".repeat(80)); + console.log(chalk.dim("─".repeat(80))); for (const webhook of webhooks) { const id = webhook.id.substring(0, 18).padEnd(20); const table = webhook.table.padEnd(15); const events = webhook.events.join(", ").padEnd(20); - const status = webhook.enabled ? "\x1b[32menabled\x1b[0m" : "\x1b[31mdisabled\x1b[0m"; + const status = webhook.enabled ? 
chalk.green("enabled") : chalk.red("disabled"); console.log(`${id} ${table} ${events} ${status}`); } - console.log("─".repeat(80)); - console.log(`\nTotal: ${webhooks.length} webhook(s)\n`); + console.log(chalk.dim("─".repeat(80))); } -/** - * Run webhook test command - */ export async function runWebhookTestCommand(projectRoot: string, webhookId: string): Promise { const config = await loadConfig(projectRoot); @@ -389,7 +231,6 @@ export async function runWebhookTestCommand(projectRoot: string, webhookId: stri return; } - // Extract env var names from process.env references const urlEnvMatch = webhook.url.match(/^process\.env\.(\w+)$/); const secretEnvMatch = webhook.secret.match(/^process\.env\.(\w+)$/); @@ -401,7 +242,6 @@ export async function runWebhookTestCommand(projectRoot: string, webhookId: stri const urlEnvVar = urlEnvMatch[1]; const secretEnvVar = secretEnvMatch[1]; - // Get actual values from process.env const url = process.env[urlEnvVar]; const secret = process.env[secretEnvVar]; @@ -417,7 +257,6 @@ export async function runWebhookTestCommand(projectRoot: string, webhookId: stri return; } - // Create a temporary dispatcher for testing const testWebhookConfig = { ...webhook, url, @@ -438,7 +277,7 @@ export async function runWebhookTestCommand(projectRoot: string, webhookId: stri logger.success("Webhook test succeeded!"); console.log(` Status: ${result.status_code}`); if (result.response_body) { - console.log(` Response: ${result.response_body.substring(0, 200)}`); + console.log(chalk.dim(` Response: ${result.response_body.substring(0, 200)}`)); } } else { logger.error("Webhook test failed!"); @@ -446,10 +285,10 @@ export async function runWebhookTestCommand(projectRoot: string, webhookId: stri console.log(` Status: ${result.status_code}`); } if (result.response_body) { - console.log(` Response: ${result.response_body.substring(0, 200)}`); + console.log(chalk.dim(` Response: ${result.response_body.substring(0, 200)}`)); } if (result.error) { - 
console.log(` Error: ${result.error}`); + console.log(chalk.dim(` Error: ${result.error}`)); } } } catch (error) { @@ -457,16 +296,70 @@ export async function runWebhookTestCommand(projectRoot: string, webhookId: stri } } -/** - * Options for webhook logs command - */ interface WebhookLogsOptions { limit?: number; } -/** - * Find database path from project - */ +function findSchemaFile(projectRoot: string): string | null { + const schemaPaths = [ + path.join(projectRoot, "src/db/schema.ts"), + path.join(projectRoot, "src/database/schema.ts"), + path.join(projectRoot, "schema.ts"), + ]; + + for (const schemaPath of schemaPaths) { + if (fsExistsSync(schemaPath)) { + return schemaPath; + } + } + + return null; +} + +function getTablesFromSchema(projectRoot: string): string[] { + const schemaPath = findSchemaFile(projectRoot); + if (!schemaPath) { + return []; + } + + try { + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + return Object.keys(tables); + } catch (error) { + logger.warn(`Failed to scan schema: ${error instanceof Error ? error.message : String(error)}`); + return []; + } +} + +async function readConfigFile( + projectRoot: string, +): Promise<{ content: string; path: string } | null> { + const configPath = await findConfigFile(projectRoot); + if (!configPath) { + return null; + } + + try { + const content = readFileSync(configPath, "utf-8"); + return { content, path: configPath }; + } catch { + return null; + } +} + +function writeConfigFile(configPath: string, content: string): boolean { + try { + writeFileSync(configPath, content, "utf-8"); + return true; + } catch (error) { + logger.error( + `Failed to write config: ${error instanceof Error ? 
error.message : String(error)}`, + ); + return false; + } +} + function findDatabasePath(projectRoot: string): string | null { const dbPathVariants = [ path.join(projectRoot, ".betterbase", "dev.db"), @@ -483,9 +376,6 @@ function findDatabasePath(projectRoot: string): string | null { return null; } -/** - * Run webhook logs command - */ export async function runWebhookLogsCommand( projectRoot: string, webhookId: string, @@ -508,33 +398,29 @@ export async function runWebhookLogsCommand( const limit = options.limit ?? 50; - logger.info(`Webhook: ${webhook.id}`); - logger.info(`Table: ${webhook.table}`); - logger.info(`Events: ${webhook.events.join(", ")}`); - logger.info(`Limit: ${limit}`); + logger.keyValue("Webhook", webhook.id); + logger.keyValue("Table", webhook.table); + logger.keyValue("Events", webhook.events.join(", ")); + logger.keyValue("Limit", String(limit)); + logger.blank(); - console.log("\n\x1b[1mDelivery Logs\x1b[0m"); - console.log("─".repeat(80)); + logger.section("Delivery Logs"); - // Try to find and query the database const dbPath = findDatabasePath(projectRoot); if (!dbPath) { logger.info("No local database found."); logger.info("Delivery logs are stored in the project's database."); - console.log("\n To view logs, either:"); - console.log(" 1. Run the dev server and access the API: GET /api/webhooks/:webhookId/deliveries"); - console.log(" 2. Check the dashboard if deployed\n"); - console.log("─".repeat(80)); + console.log(chalk.dim("\n To view logs, either:")); + console.log(chalk.dim(" 1. Run the dev server and access the API: GET /api/webhooks/:webhookId/deliveries")); + console.log(chalk.dim(" 2. 
Check the dashboard if deployed\n")); return; } try { - // Use Bun's sqlite to query the database directly const { Database } = await import("bun:sqlite"); const db = new Database(dbPath, { readonly: true }); - // Try to query the deliveries table interface DeliveryLog { id: string; webhook_id: string; @@ -550,7 +436,7 @@ export async function runWebhookLogsCommand( const result: DeliveryLog[] = db .query( - `SELECT + `SELECT id, webhook_id, status, @@ -571,18 +457,13 @@ export async function runWebhookLogsCommand( db.close(); if (result.length === 0) { - console.log("\n No delivery logs found for this webhook.\n"); - console.log("─".repeat(80)); + logger.info("No delivery logs found for this webhook."); return; } - // Print table header - console.log( - `\x1b[1m${"Status".padEnd(10)} ${"Code".padEnd(6)} ${"Attempts".padEnd(10)} ${"Created At".padEnd(24)} ${"Error".padEnd(20)}\x1b[0m`, - ); - console.log("─".repeat(80)); + console.log(chalk.bold(`${"Status".padEnd(10)} ${"Code".padEnd(6)} ${"Attempts".padEnd(10)} ${"Created At".padEnd(24)} ${"Error".padEnd(20)}`)); + console.log(chalk.dim("─".repeat(80))); - // Print each log entry for (const log of result) { const status = log.status.padEnd(10); const code = (log.response_code?.toString() ?? "N/A").padEnd(6); @@ -592,34 +473,28 @@ export async function runWebhookLogsCommand( : "N/A"; const error = log.error ? log.error.substring(0, 20) : ""; - // Color code status const statusColored = log.status === "success" - ? "\x1b[32m" + status + "\x1b[0m" + ? chalk.green(status) : log.status === "failed" - ? "\x1b[31m" + status + "\x1b[0m" - : "\x1b[33m" + status + "\x1b[0m"; + ? 
chalk.red(status) + : chalk.yellow(status); console.log(`${statusColored} ${code} ${attempts} ${createdAt} ${error}`); } - console.log("─".repeat(80)); + console.log(chalk.dim("─".repeat(80))); console.log(`\nTotal: ${result.length} delivery log(s)\n`); } catch (error) { - // Table might not exist or other error logger.warn("Could not fetch delivery logs from database."); if (error instanceof Error) { logger.warn(error.message); } - console.log("\n Make sure migrations have been run."); - console.log(" Run: bb migrate\n"); - console.log("─".repeat(80)); + console.log(chalk.dim("\n Make sure migrations have been run.")); + console.log(chalk.dim(" Run: bb migrate\n")); } } -/** - * Execute webhook command with subcommands - */ export async function runWebhookCommand(args: string[], projectRoot: string): Promise { const [subcommand, ...remainingArgs] = args; @@ -652,23 +527,21 @@ export async function runWebhookCommand(args: string[], projectRoot: string): Pr break; default: - console.log(` -\x1b[1mBetterBase Webhook Commands\x1b[0m - -\x1b[1mUsage:\x1b[0m - bb webhook [options] - -\x1b[1mCommands:\x1b[0m - create Create a new webhook - list List all configured webhooks - test Test a webhook by sending a synthetic payload - logs Show delivery logs for a webhook - -\x1b[1mExamples:\x1b[0m - bb webhook create - bb webhook list - bb webhook test webhook-abc123 - bb webhook logs webhook-abc123 -`); + logger.section("BetterBase Webhook Commands"); + logger.info("Usage:"); + console.log(chalk.dim(" bb webhook [options]")); + logger.blank(); + logger.info("Commands:"); + console.log(chalk.dim(` ${chalk.white("create")} Create a new webhook`)); + console.log(chalk.dim(` ${chalk.white("list")} List all configured webhooks`)); + console.log(chalk.dim(` ${chalk.white("test ")} Test a webhook by sending a synthetic payload`)); + console.log(chalk.dim(` ${chalk.white("logs ")} Show delivery logs for a webhook`)); + logger.blank(); + logger.info("Examples:"); + 
console.log(chalk.dim(" bb webhook create")); + console.log(chalk.dim(" bb webhook list")); + console.log(chalk.dim(" bb webhook test webhook-abc123")); + console.log(chalk.dim(" bb webhook logs webhook-abc123")); + break; } } diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index fd053ce..cf9b09f 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -9,7 +9,9 @@ import { runGenerateCrudCommand } from "./commands/generate"; import { runGenerateGraphqlCommand, runGraphqlPlaygroundCommand } from "./commands/graphql"; import { runIacAnalyze } from "./commands/iac/analyze"; import { runIacExport } from "./commands/iac/export"; +import { runIacGenerate } from "./commands/iac/generate"; import { runIacImport } from "./commands/iac/import"; +import { runIacSync } from "./commands/iac/sync"; import { runInitCommand } from "./commands/init"; import { isAuthenticated, runLoginCommand, runLogoutCommand } from "./commands/login"; import { @@ -41,18 +43,26 @@ const PUBLIC_COMMANDS = [ "-h", ]; +function extractCommandName(argv: string[]): string { + for (let i = 2; i < argv.length; i++) { + const arg = argv[i]; + if (arg && !arg.startsWith("-")) { + return arg; + } + } + return ""; +} + /** * Check if the user is authenticated before running a command. 
*/ async function checkAuthHook(): Promise<void> { - const commandName = process.argv[2]; + const commandName = extractCommandName(process.argv); - // Skip auth check for public commands - if (PUBLIC_COMMANDS.includes(commandName)) { + if (!commandName || PUBLIC_COMMANDS.includes(commandName)) { return; } - // Check authentication status const authenticated = await isAuthenticated(); if (!authenticated) { logger.error( @@ -225,6 +235,23 @@ export function createProgram(): Command { const iac = program.command("iac").description("IaC (Infrastructure as Code) management"); + iac + .command("sync") + .description("Sync IaC schema changes and generate Drizzle migration") + .argument("[project-root]", "project root directory", process.cwd()) + .option("--force", "Apply destructive changes without confirmation") + .action(async (projectRoot: string, options: { force?: boolean }) => { + await runIacSync(projectRoot, { force: options.force }); + }); + + iac + .command("generate") + .description("Generate API type definitions from betterbase/ functions") + .argument("[project-root]", "project root directory", process.cwd()) + .action(async (projectRoot: string) => { + await runIacGenerate(projectRoot); + }); + iac .command("analyze") .description("Run query diagnostics and analyze for performance issues") @@ -274,21 +301,21 @@ export function createProgram(): Command { .description("Generate and apply migrations for local development"); migrate.action(async () => { - await runMigrateCommand({}); + await runMigrateCommand({ projectRoot: process.cwd() }); }); migrate .command("preview") .description("Preview migration diff without applying changes") .action(async () => { - await runMigrateCommand({ preview: true }); + await runMigrateCommand({ preview: true, projectRoot: process.cwd() }); }); migrate .command("production") .description("Apply migrations to production (requires confirmation)") .action(async () => { - await runMigrateCommand({ production: true }); + await 
runMigrateCommand({ production: true, projectRoot: process.cwd() }); }); migrate diff --git a/packages/cli/src/utils/config.ts b/packages/cli/src/utils/config.ts new file mode 100644 index 0000000..23af02b --- /dev/null +++ b/packages/cli/src/utils/config.ts @@ -0,0 +1,64 @@ +import { existsSync as fsExistsSync, readFileSync } from "node:fs"; +import path from "node:path"; +import { type BetterBaseConfig, parseConfig } from "@betterbase/core/config"; +import { CONFIG_FILE_NAME } from "@betterbase/shared"; +import * as logger from "./logger"; + +export async function findConfigFile(projectRoot: string): Promise<string | null> { + const configPaths = [ + path.join(projectRoot, CONFIG_FILE_NAME), + path.join(projectRoot, CONFIG_FILE_NAME.replace(".ts", ".js")), + path.join(projectRoot, CONFIG_FILE_NAME.replace(".ts", ".mts")), + ]; + + for (const configPath of configPaths) { + if (fsExistsSync(configPath)) { + return configPath; + } + } + + return null; +} + +export async function loadConfig(projectRoot: string): Promise<BetterBaseConfig | null> { + const configPath = await findConfigFile(projectRoot); + + if (!configPath) { + return null; + } + + try { + const configModule = await import(configPath); + const config = configModule.default || configModule; + + if (config && typeof config === "object") { + const parseResult = parseConfig(config); + if (parseResult.success) { + return parseResult.data; + } + logger.warn(`Config validation: ${parseResult.error.message}`); + return null; + } + + return null; + } catch (error) { + logger.warn(`Failed to load config: ${error instanceof Error ? 
error.message : String(error)}`); + return null; + } +} + +export async function readConfigFile( + projectRoot: string, +): Promise<{ content: string; path: string } | null> { + const configPath = await findConfigFile(projectRoot); + if (!configPath) { + return null; + } + + try { + const content = readFileSync(configPath, "utf-8"); + return { content, path: configPath }; + } catch { + return null; + } +} diff --git a/packages/cli/src/utils/context-generator.ts b/packages/cli/src/utils/context-generator.ts index a6cc37a..5b25315 100644 --- a/packages/cli/src/utils/context-generator.ts +++ b/packages/cli/src/utils/context-generator.ts @@ -2,7 +2,7 @@ import { existsSync, readFileSync, readdirSync, writeFileSync } from "node:fs"; import path from "node:path"; import * as logger from "./logger"; import { type RouteInfo, RouteScanner } from "./route-scanner"; -import { SchemaScanner, type TableInfo } from "./schema-scanner"; +import { SchemaScanner, type TableInfo } from "./scanner"; export interface BetterBaseContext { version: string; diff --git a/packages/cli/src/utils/schema-scanner.ts b/packages/cli/src/utils/schema-scanner.ts deleted file mode 100644 index dfa802e..0000000 --- a/packages/cli/src/utils/schema-scanner.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { SchemaScanner } from "./scanner"; -export type { ColumnInfo, TableInfo } from "./scanner"; diff --git a/packages/cli/test/error-messages.test.ts b/packages/cli/test/error-messages.test.ts index 6b7ede1..f41e57e 100644 --- a/packages/cli/test/error-messages.test.ts +++ b/packages/cli/test/error-messages.test.ts @@ -70,7 +70,7 @@ export const comments = sqliteTable('comments', { ); // Import the SchemaScanner to get available tables - const { SchemaScanner } = await import("../src/utils/schema-scanner"); + const { SchemaScanner } = await import("../src/utils/scanner"); const schemaPath = path.join(testDir, "src/db/schema.ts"); const scanner = new SchemaScanner(schemaPath); const tables = scanner.scan();