From 5c4526712ba99656fc3a06ba2124120587819a12 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 01:11:01 +0200 Subject: [PATCH 01/12] Use Bun workspace runner for root typecheck --- betterbase/.gitignore | 6 ++-- betterbase/README.md | 10 ++++++ betterbase/apps/cli/tsconfig.json | 11 ++++-- betterbase/package.json | 4 +-- betterbase/packages/cli/src/build.ts | 17 +++++---- betterbase/packages/cli/src/commands/init.ts | 6 ++-- betterbase/templates/base/README.md | 12 +++++++ betterbase/templates/base/package.json | 4 ++- betterbase/templates/base/src/db/index.ts | 6 ++-- betterbase/templates/base/src/db/migrate.ts | 4 ++- betterbase/templates/base/src/index.ts | 5 +-- betterbase/templates/base/src/lib/env.ts | 3 ++ betterbase/templates/base/src/routes/index.ts | 5 +-- betterbase/templates/base/src/routes/users.ts | 35 +++++++++++++++++-- betterbase/tsconfig.base.json | 3 +- 15 files changed, 105 insertions(+), 26 deletions(-) diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..9b75d8d 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,16 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..d9aaed2 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,16 @@ Initial BetterBase monorepo scaffold with a concrete base template. 
- Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..fed8759 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.x", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "bun run --filter '*' typecheck" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..b05d2fb 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,17 @@ +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + /** * Build the CLI as a standalone bundled executable output. 
*/ export async function buildStandaloneCli(): Promise { + const moduleDir = path.dirname(fileURLToPath(import.meta.url)); + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +24,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -27,10 +34,8 @@ async function main(): Promise { } const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; -if (isEsmMain || isCjsMain) { +if (isEsmMain) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..3b624e3 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -447,7 +447,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.execute(sql\`select 1\`); return 
c.json({ status: 'healthy', diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..0b797bf 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env, DEFAULT_DB_PATH } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 
'local.db'; -const sqlite = new Database(dbPath, { create: true }); +export { DEFAULT_DB_PATH }; + +const sqlite = new Database(env.DB_PATH || DEFAULT_DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/db/migrate.ts b/betterbase/templates/base/src/db/migrate.ts index c590040..8525fef 100644 --- a/betterbase/templates/base/src/db/migrate.ts +++ b/betterbase/templates/base/src/db/migrate.ts @@ -1,9 +1,11 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; import { migrate } from 'drizzle-orm/bun-sqlite/migrator'; +import { env } from '../lib/env'; +import { DEFAULT_DB_PATH } from './index'; try { - const sqlite = new Database(process.env.DB_PATH ?? 'local.db', { create: true }); + const sqlite = new Database(env.DB_PATH || DEFAULT_DB_PATH, { create: true }); const db = drizzle(sqlite); migrate(db, { migrationsFolder: './drizzle' }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..359c1c5 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,5 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +7,8 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 
3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..3c5f580 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -10,11 +10,42 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number.parseInt(value, 10); + if (Number.isNaN(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = hasMore ? 
rows.slice(0, limit) : rows; + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..e86a10c 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,7 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true } } From d074f1a165177b5333bd6d1282b04314c6f19028 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 02:00:16 +0200 Subject: [PATCH 02/12] Address follow-up review fixes for Bun pin, migrations, and pagination --- betterbase/package.json | 2 +- betterbase/packages/cli/src/build.ts | 7 ++-- betterbase/packages/cli/src/commands/init.ts | 34 +++++++++++++++++-- betterbase/templates/base/src/db/index.ts | 3 +- betterbase/templates/base/src/db/migrate.ts | 5 ++- betterbase/templates/base/src/routes/users.ts | 32 ++++++++++------- 6 files changed, 58 insertions(+), 25 deletions(-) diff --git a/betterbase/package.json b/betterbase/package.json index fed8759..c4b36e5 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.2.x", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index b05d2fb..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,11 +1,10 @@ import path from 'node:path'; -import { fileURLToPath } from 'node:url'; /** * Build the CLI as a standalone bundled executable output. 
*/ export async function buildStandaloneCli(): Promise { - const moduleDir = path.dirname(fileURLToPath(import.meta.url)); + const moduleDir = import.meta.dir; const entrypoint = path.resolve(moduleDir, 'index.ts'); const outdir = path.resolve(moduleDir, '../dist'); @@ -33,9 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; - -if (isEsmMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 3b624e3..8e0c92b 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -510,9 +510,39 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const effectiveLimit = Math.max(limit, 1); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); + const hasMore = limit === 0 ? false : rows.length > limit; + const paginatedUsers = limit === 0 ? 
[] : rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + limit, + offset, + hasMore, + }); }); usersRoute.post('/', async (c) => { diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index 0b797bf..3a149bf 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -5,6 +5,7 @@ import * as schema from './schema'; export { DEFAULT_DB_PATH }; -const sqlite = new Database(env.DB_PATH || DEFAULT_DB_PATH, { create: true }); +// env.DB_PATH is always present because env schema provides a default. +const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/db/migrate.ts b/betterbase/templates/base/src/db/migrate.ts index 8525fef..2bdd9bf 100644 --- a/betterbase/templates/base/src/db/migrate.ts +++ b/betterbase/templates/base/src/db/migrate.ts @@ -1,11 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; import { migrate } from 'drizzle-orm/bun-sqlite/migrator'; -import { env } from '../lib/env'; -import { DEFAULT_DB_PATH } from './index'; +import { DEFAULT_DB_PATH } from '../lib/env'; try { - const sqlite = new Database(env.DB_PATH || DEFAULT_DB_PATH, { create: true }); + const sqlite = new Database(DEFAULT_DB_PATH, { create: true }); const db = drizzle(sqlite); migrate(db, { migrationsFolder: './drizzle' }); diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 3c5f580..11a7dda 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -19,8 +19,8 @@ function parseNonNegativeInt(value: string | undefined, fallback: number): numbe return fallback; } - const parsed = Number.parseInt(value, 10); - if (Number.isNaN(parsed) || parsed < 0) { + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { 
return fallback; } @@ -32,20 +32,26 @@ export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); const limit = Math.min(requestedLimit, MAX_LIMIT); + const effectiveLimit = Math.max(limit, 1); const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); - const rows = await db.select().from(users).limit(limit + 1).offset(offset); - const hasMore = rows.length > limit; - const paginatedUsers = hasMore ? rows.slice(0, limit) : rows; + try { + const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); + const hasMore = limit === 0 ? false : rows.length > limit; + const paginatedUsers = limit === 0 ? [] : rows.slice(0, limit); - return c.json({ - users: paginatedUsers, - pagination: { - limit, - offset, - hasMore, - }, - }); + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + console.error('Failed to fetch users:', error); + return c.json({ error: 'Failed to fetch users' }, 500); + } }); usersRoute.post('/', async (c) => { From eae08a2dca4a933296b14b231c5e5ac1b9c3b973 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 02:28:53 +0200 Subject: [PATCH 03/12] Align init scaffold users/health handling with template behavior --- betterbase/packages/cli/src/commands/init.ts | 33 ++++++++++++------- betterbase/templates/base/src/routes/users.ts | 6 +++- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 8e0c92b..2fa594a 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -427,7 +427,7 @@ bun.lockb .env.* !.env.example local.db -.drizzle +drizzle/ `, ); @@ -447,7 +447,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.execute(sql\`select 1\`); + 
await db.${databaseMode === 'local' ? 'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -533,16 +533,27 @@ usersRoute.get('/', async (c) => { const effectiveLimit = Math.max(limit, 1); const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); - const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); - const hasMore = limit === 0 ? false : rows.length > limit; - const paginatedUsers = limit === 0 ? [] : rows.slice(0, limit); + try { + const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); + const hasMore = limit === 0 ? false : rows.length > limit; + const paginatedUsers = limit === 0 ? [] : rows.slice(0, limit); - return c.json({ - users: paginatedUsers, - limit, - offset, - hasMore, - }); + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 11a7dda..be22408 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -49,8 +49,12 @@ usersRoute.get('/', async (c) => { }, }); } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + console.error('Failed to fetch users:', error); - return c.json({ error: 'Failed to fetch users' }, 500); + throw error; } }); From 3462a9630d64cfa3d81a583fbf07f10dc301e600 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 02:50:08 +0200 Subject: [PATCH 04/12] Implement bb migrate preview/production workflow --- betterbase/.gitignore | 6 +- betterbase/README.md | 10 + betterbase/apps/cli/tsconfig.json | 11 +- betterbase/package.json | 4 +- betterbase/packages/cli/src/build.ts | 18 +- 
betterbase/packages/cli/src/commands/init.ts | 53 ++- .../packages/cli/src/commands/migrate.ts | 343 +++++++++++++++++- betterbase/packages/cli/src/index.ts | 21 +- betterbase/packages/cli/test/smoke.test.ts | 8 +- betterbase/templates/base/README.md | 12 + betterbase/templates/base/package.json | 4 +- betterbase/templates/base/src/db/index.ts | 7 +- betterbase/templates/base/src/db/migrate.ts | 3 +- betterbase/templates/base/src/index.ts | 5 +- betterbase/templates/base/src/lib/env.ts | 3 + betterbase/templates/base/src/routes/index.ts | 5 +- betterbase/templates/base/src/routes/users.ts | 45 ++- betterbase/tsconfig.base.json | 3 +- 18 files changed, 513 insertions(+), 48 deletions(-) diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..9b75d8d 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,16 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..d9aaed2 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,16 @@ Initial BetterBase monorepo scaffold with a concrete base template. 
- Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..c4b36e5 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "bun run --filter '*' typecheck" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,16 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. 
*/ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +23,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -26,11 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..2fa594a 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -427,7 +427,7 @@ bun.lockb .env.* !.env.example local.db -.drizzle +drizzle/ `, ); @@ -447,7 +447,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async 
(c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -510,9 +510,50 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const effectiveLimit = Math.max(limit, 1); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + try { + const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); + const hasMore = limit === 0 ? false : rows.length > limit; + const paginatedUsers = limit === 0 ? 
[] : rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..a805ddb 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,351 @@ +import chalk from 'chalk'; +import { mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; +export type MigrationChangeType = + | 'create_table' + | 'add_column' + | 'modify_column' + | 'drop_column' + | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; + exitCode: number; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; +const DEFAULT_DB_PATH = 'local.db'; + +async function runDrizzleKit(args: string[]): Promise { + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { + success: exitCode === 0, + stdout, + stderr, + exitCode, + }; +} + +async function 
listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + if (!(await Bun.file(dir).exists())) { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); + continue; + } + + const stat = await Bun.file(fullPath).stat(); + if (stat.isDirectory()) { + await walk(fullPath); + continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) { + continue; + } + + const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + if (createTable) { + changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + continue; + } + + const dropTable = sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + if (dropTable) { + changes.push({ type: 'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + if (addColumn) { + changes.push({ + type: 'add_column', + table: addColumn[1], + column: addColumn[2], + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: dropColumn[1], + column: dropColumn[2], + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + 
/alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: alterColumn[1], + column: alterColumn[3] ?? alterColumn[4], + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type/i.test(sql), + detail: sql, + }); + continue; + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length > 0) { + console.log(chalk.green('āœ… New Tables:')); + for (const change of newTables) { + console.log(chalk.green(` + ${change.table}`)); + } + console.log(''); + } + + if (newColumns.length > 0) { + console.log(chalk.green('āœ… New Columns:')); + for (const change of newColumns) { + console.log(chalk.green(` + ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (modified.length > 0) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + for (const change of modified) { + console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (destructive.length > 0) { + console.log(chalk.red('āŒ Destructive Changes:')); + for (const change of destructive) { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? 
`.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + } + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + + if (destructive.length === 0) { + return true; + } + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + for (const change of destructive) { + console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`); + } + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? DEFAULT_DB_PATH; + const source = Bun.file(sourcePath); + + if (!(await source.exists())) { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + await Bun.write(backupPath, source); + logger.success(`Backup saved: ${backupPath}`); + + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) { + return; + } + + await Bun.write(backup.sourcePath, Bun.file(backup.backupPath)); + logger.warn(`Rollback complete. 
Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*(?:\n|$)/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) { + continue; + } + + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + /** * Run the `bb migrate` command. */ export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. 
(Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ + message: 'Apply migrations to production now?', + initial: false, + }); + + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + + const confirmed = await confirmDestructive(changes); + if (!confirmed) { + return; + } + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/connect|econn|database/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..932ab3c 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -26,10 +26,23 @@ export function createProgram(): Command { program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + 
.description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..cb9d36a 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,15 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + test('registers migrate commands', () => { const program = createProgram(); + const migrate = program.commands.find((command) => command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). 
diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..3a149bf 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,11 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env, DEFAULT_DB_PATH } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 'local.db'; -const sqlite = new Database(dbPath, { create: true }); +export { DEFAULT_DB_PATH }; + +// env.DB_PATH is always present because env schema provides a default. +const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/db/migrate.ts b/betterbase/templates/base/src/db/migrate.ts index c590040..2bdd9bf 100644 --- a/betterbase/templates/base/src/db/migrate.ts +++ b/betterbase/templates/base/src/db/migrate.ts @@ -1,9 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; import { migrate } from 'drizzle-orm/bun-sqlite/migrator'; +import { DEFAULT_DB_PATH } from '../lib/env'; try { - const sqlite = new Database(process.env.DB_PATH ?? 
'local.db', { create: true }); + const sqlite = new Database(DEFAULT_DB_PATH, { create: true }); const db = drizzle(sqlite); migrate(db, { migrationsFolder: './drizzle' }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..359c1c5 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,5 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +7,8 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: 
Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..be22408 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -10,11 +10,52 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const effectiveLimit = Math.max(limit, 1); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + try { + const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); + const hasMore = limit === 0 ? false : rows.length > limit; + const paginatedUsers = limit === 0 ? 
[] : rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..e86a10c 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,7 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true } } From 3abe18c3ebda637fd9024dfc91f94bb56ca7e33f Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 03:09:27 +0200 Subject: [PATCH 05/12] Address review feedback for migrate and scaffold generation --- betterbase/package.json | 2 +- betterbase/packages/cli/src/commands/init.ts | 23 ++++++--- .../packages/cli/src/commands/migrate.ts | 49 +++++++++++-------- betterbase/packages/cli/src/constants.ts | 1 + betterbase/templates/base/src/db/index.ts | 3 +- betterbase/templates/base/src/db/migrate.ts | 3 +- betterbase/templates/base/src/routes/users.ts | 44 ++++++++++------- 7 files changed, 73 insertions(+), 52 deletions(-) create mode 100644 betterbase/packages/cli/src/constants.ts diff --git a/betterbase/package.json b/betterbase/package.json index c4b36e5..0f4c3d8 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "bun run --filter '*' typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 2fa594a..ccc3741 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ 
b/betterbase/packages/cli/src/commands/init.ts @@ -427,7 +427,6 @@ bun.lockb .env.* !.env.example local.db -drizzle/ `, ); @@ -471,9 +470,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -530,13 +529,23 @@ function parseNonNegativeInt(value: string | undefined, fallback: number): numbe usersRoute.get('/', async (c) => { const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); const limit = Math.min(requestedLimit, MAX_LIMIT); - const effectiveLimit = Math.max(limit, 1); const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + try { - const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); - const hasMore = limit === 0 ? false : rows.length > limit; - const paginatedUsers = limit === 0 ? 
[] : rows.slice(0, limit); + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); return c.json({ users: paginatedUsers, diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index a805ddb..37573f8 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,7 +1,9 @@ +import { Database } from 'bun:sqlite'; import chalk from 'chalk'; -import { mkdir, readdir } from 'node:fs/promises'; +import { access, mkdir, readdir } from 'node:fs/promises'; import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; @@ -31,7 +33,6 @@ interface DrizzleResult { success: boolean; stdout: string; stderr: string; - exitCode: number; } interface MigrationBackup { @@ -40,7 +41,6 @@ interface MigrationBackup { } const DRIZZLE_DIR = 'drizzle'; -const DEFAULT_DB_PATH = 'local.db'; async function runDrizzleKit(args: string[]): Promise { const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { @@ -59,7 +59,6 @@ async function runDrizzleKit(args: string[]): Promise { success: exitCode === 0, stdout, stderr, - exitCode, }; } @@ -68,7 +67,9 @@ async function listSqlFiles(baseDir: string): Promise> { const root = path.join(process.cwd(), baseDir); const walk = async (dir: string): Promise => { - if (!(await Bun.file(dir).exists())) { + try { + await access(dir); + } catch { return; } @@ -80,12 +81,6 @@ async function listSqlFiles(baseDir: string): Promise> { continue; } - const stat = await Bun.file(fullPath).stat(); - if (stat.isDirectory()) { - await walk(fullPath); - continue; - } - if (!fullPath.endsWith('.sql')) { continue; } @@ -151,7 +146,7 @@ function analyzeMigration(sqlStatements: string[]): MigrationChange[] { type: 
'modify_column', table: alterColumn[1], column: alterColumn[3] ?? alterColumn[4], - isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type/i.test(sql), + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), detail: sql, }); continue; @@ -210,7 +205,6 @@ function displayDiff(changes: MigrationChange[]): void { async function confirmDestructive(changes: MigrationChange[]): Promise { const destructive = changes.filter((c) => c.isDestructive); - if (destructive.length === 0) { return true; } @@ -231,9 +225,10 @@ async function confirmDestructive(changes: MigrationChange[]): Promise async function backupDatabase(): Promise { const sourcePath = process.env.DB_PATH ?? DEFAULT_DB_PATH; - const source = Bun.file(sourcePath); - if (!(await source.exists())) { + try { + await access(sourcePath); + } catch { logger.warn(`No local database found at ${sourcePath}; skipping backup.`); return null; } @@ -243,9 +238,16 @@ async function backupDatabase(): Promise { await mkdir(backupDir, { recursive: true }); const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); - await Bun.write(backupPath, source); - logger.success(`Backup saved: ${backupPath}`); + const db = new Database(sourcePath, { readonly: true }); + try { + const snapshot = db.serialize(); + await Bun.write(backupPath, snapshot); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); return { sourcePath, backupPath }; } @@ -254,7 +256,15 @@ async function restoreBackup(backup: MigrationBackup | null): Promise { return; } - await Bun.write(backup.sourcePath, Bun.file(backup.backupPath)); + const bytes = await Bun.file(backup.backupPath).bytes(); + const restoredDb = Database.deserialize(bytes); + + try { + await Bun.write(backup.sourcePath, restoredDb.serialize()); + } finally { + restoredDb.close(); + } + logger.warn(`Rollback complete. 
Restored database from ${backup.backupPath}`); } @@ -292,9 +302,6 @@ async function collectChangesFromGenerate(): Promise { return analyzeMigration(changedSql); } -/** - * Run the `bb migrate` command. - */ export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..8daff7f --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1 @@ +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index 3a149bf..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,9 +1,8 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; -import { env, DEFAULT_DB_PATH } from '../lib/env'; +import { env } from '../lib/env'; import * as schema from './schema'; -export { DEFAULT_DB_PATH }; // env.DB_PATH is always present because env schema provides a default. const sqlite = new Database(env.DB_PATH, { create: true }); diff --git a/betterbase/templates/base/src/db/migrate.ts b/betterbase/templates/base/src/db/migrate.ts index 2bdd9bf..c590040 100644 --- a/betterbase/templates/base/src/db/migrate.ts +++ b/betterbase/templates/base/src/db/migrate.ts @@ -1,10 +1,9 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; import { migrate } from 'drizzle-orm/bun-sqlite/migrator'; -import { DEFAULT_DB_PATH } from '../lib/env'; try { - const sqlite = new Database(DEFAULT_DB_PATH, { create: true }); + const sqlite = new Database(process.env.DB_PATH ?? 
'local.db', { create: true }); const db = drizzle(sqlite); migrate(db, { migrationsFolder: './drizzle' }); diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index be22408..5f2605b 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -14,31 +14,37 @@ const DEFAULT_LIMIT = 25; const MAX_LIMIT = 100; const DEFAULT_OFFSET = 0; -function parseNonNegativeInt(value: string | undefined, fallback: number): number { - if (!value) { - return fallback; - } - - const parsed = Number(value); - if (!Number.isInteger(parsed) || parsed < 0) { - return fallback; - } - - return parsed; -} +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); - const limit = Math.min(requestedLimit, MAX_LIMIT); - const effectiveLimit = Math.max(limit, 1); - const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + const pagination = paginationSchema.parse({ + limit: c.req.query('limit') ?? undefined, + offset: c.req.query('offset') ?? undefined, + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } try { - const rows = await db.select().from(users).limit(effectiveLimit + 1).offset(offset); - const hasMore = limit === 0 ? false : rows.length > limit; - const paginatedUsers = limit === 0 ? 
[] : rows.slice(0, limit); + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); return c.json({ users: paginatedUsers, From 574fbba8bc106fa55d42c382d5e8b8f5f250b2a2 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 03:30:30 +0200 Subject: [PATCH 06/12] Add TypeScript schema scanner for Drizzle schema extraction --- betterbase/.gitignore | 6 +- betterbase/README.md | 10 + betterbase/apps/cli/tsconfig.json | 11 +- betterbase/package.json | 4 +- betterbase/packages/cli/package.json | 7 +- betterbase/packages/cli/src/build.ts | 18 +- betterbase/packages/cli/src/commands/init.ts | 66 +++- .../packages/cli/src/commands/migrate.ts | 356 +++++++++++++++++- betterbase/packages/cli/src/constants.ts | 1 + betterbase/packages/cli/src/index.ts | 21 +- betterbase/packages/cli/src/utils/scanner.ts | 251 ++++++++++++ betterbase/packages/cli/test/scanner.test.ts | 61 +++ betterbase/packages/cli/test/smoke.test.ts | 8 +- betterbase/templates/base/README.md | 12 + betterbase/templates/base/package.json | 4 +- betterbase/templates/base/src/db/index.ts | 6 +- betterbase/templates/base/src/index.ts | 5 +- betterbase/templates/base/src/lib/env.ts | 3 + betterbase/templates/base/src/routes/index.ts | 5 +- betterbase/templates/base/src/routes/users.ts | 51 ++- betterbase/tsconfig.base.json | 3 +- 21 files changed, 854 insertions(+), 55 deletions(-) create mode 100644 betterbase/packages/cli/src/constants.ts create mode 100644 betterbase/packages/cli/src/utils/scanner.ts create mode 100644 betterbase/packages/cli/test/scanner.test.ts diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..9b75d8d 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,16 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git 
a/betterbase/README.md b/betterbase/README.md index 46bf456..d9aaed2 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,16 @@ Initial BetterBase monorepo scaffold with a concrete base template. - Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..0f4c3d8 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index 5b8543d..d4ba78a 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -16,11 +16,12 @@ "chalk": "^5.3.0", "commander": "^12.1.0", "inquirer": "^10.2.2", - "zod": "^3.23.8" + "zod": "^3.23.8", + "typescript": "^5.3.0", + "@typescript/vfs": "^1.5.0" }, "devDependencies": { - "@types/bun": "^1.3.9", - "typescript": "^5.9.3" + "@types/bun": "^1.3.9" }, "exports": { ".": "./src/index.ts" diff --git 
a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,16 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. */ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +23,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -26,11 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..ccc3741 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: 
'^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -427,7 +427,6 @@ bun.lockb .env.* !.env.example local.db -.drizzle `, ); @@ -447,7 +446,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -471,9 +470,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -510,9 +509,60 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: 
paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..37573f8 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,358 @@ +import { Database } from 'bun:sqlite'; +import chalk from 'chalk'; +import { access, mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; -/** - * Run the `bb migrate` command. 
- */ +export type MigrationChangeType = + | 'create_table' + | 'add_column' + | 'modify_column' + | 'drop_column' + | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; + +async function runDrizzleKit(args: string[]): Promise { + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { + success: exitCode === 0, + stdout, + stderr, + }; +} + +async function listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + try { + await access(dir); + } catch { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); + continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) { + continue; + } + + const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + if (createTable) { + changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + continue; + } + + const dropTable = 
sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + if (dropTable) { + changes.push({ type: 'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + if (addColumn) { + changes.push({ + type: 'add_column', + table: addColumn[1], + column: addColumn[2], + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: dropColumn[1], + column: dropColumn[2], + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + /alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: alterColumn[1], + column: alterColumn[3] ?? alterColumn[4], + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), + detail: sql, + }); + continue; + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length > 0) { + console.log(chalk.green('āœ… New Tables:')); + for (const change of newTables) { + console.log(chalk.green(` + ${change.table}`)); + } + console.log(''); + } + + if (newColumns.length > 0) { + console.log(chalk.green('āœ… New Columns:')); + for (const change of newColumns) { + console.log(chalk.green(` + ${change.table}.${change.column 
?? ''}`)); + } + console.log(''); + } + + if (modified.length > 0) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + for (const change of modified) { + console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (destructive.length > 0) { + console.log(chalk.red('āŒ Destructive Changes:')); + for (const change of destructive) { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + } + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + if (destructive.length === 0) { + return true; + } + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + for (const change of destructive) { + console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`); + } + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; + + try { + await access(sourcePath); + } catch { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + + const db = new Database(sourcePath, { readonly: true }); + try { + const snapshot = db.serialize(); + await Bun.write(backupPath, snapshot); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) { + return; + } + + const bytes = await Bun.file(backup.backupPath).bytes(); + const restoredDb = Database.deserialize(bytes); + + try { + await Bun.write(backup.sourcePath, restoredDb.serialize()); + } finally { + restoredDb.close(); + } + + logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*(?:\n|$)/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. 
Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) { + continue; + } + + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. 
(Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ + message: 'Apply migrations to production now?', + initial: false, + }); + + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + + const confirmed = await confirmDestructive(changes); + if (!confirmed) { + return; + } + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/connect|econn|database/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..8daff7f --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1 @@ +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..932ab3c 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -26,10 +26,23 @@ export function createProgram(): Command { program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await 
runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + .description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts new file mode 100644 index 0000000..2221507 --- /dev/null +++ b/betterbase/packages/cli/src/utils/scanner.ts @@ -0,0 +1,251 @@ +import { readFileSync } from 'node:fs'; +import * as ts from 'typescript'; + +export interface ColumnInfo { + name: string; + type: string; + nullable: boolean; + unique: boolean; + primaryKey: boolean; + defaultValue?: string; + references?: string; +} + +export interface TableInfo { + name: string; + columns: Record; + relations: string[]; + indexes: string[]; +} + +function unwrapExpression(expression: ts.Expression): ts.Expression { + let current = expression; + + while ( + ts.isParenthesizedExpression(current) || + ts.isAsExpression(current) || + ts.isTypeAssertionExpression(current) || + ts.isSatisfiesExpression(current) + ) { + if (ts.isParenthesizedExpression(current)) { + current = current.expression; + continue; + } + + if (ts.isAsExpression(current) || ts.isTypeAssertionExpression(current) || ts.isSatisfiesExpression(current)) { + current = current.expression; + continue; + } + } + + return current; +} + +function getCallName(call: ts.CallExpression): string { + if (ts.isIdentifier(call.expression)) { + return call.expression.text; + } + + if (ts.isPropertyAccessExpression(call.expression)) { + return call.expression.name.text; + } + + return ''; +} + +function getExpressionText(sourceFile: ts.SourceFile, node: ts.Node | undefined): string { + if (!node) { + return ''; + } + + return node.getText(sourceFile); 
+} + +export class SchemaScanner { + private readonly sourceFile: ts.SourceFile; + + constructor(schemaPath: string) { + const sourceCode = readFileSync(schemaPath, 'utf-8'); + this.sourceFile = ts.createSourceFile('schema.ts', sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + } + + scan(): Record { + const tables: Record = {}; + + const visit = (node: ts.Node): void => { + if (ts.isVariableStatement(node)) { + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) { + continue; + } + + const initializer = unwrapExpression(declaration.initializer); + if (!ts.isCallExpression(initializer)) { + continue; + } + + const functionName = getCallName(initializer); + if (functionName === 'sqliteTable' || functionName === 'pgTable' || functionName === 'mysqlTable') { + tables[declaration.name.text] = this.parseTable(initializer); + } + } + } + + ts.forEachChild(node, visit); + }; + + visit(this.sourceFile); + return tables; + } + + private parseTable(callExpression: ts.CallExpression): TableInfo { + const [nameArg, columnsArg, indexesArg] = callExpression.arguments; + const tableName = ts.isStringLiteral(nameArg) ? nameArg.text : getExpressionText(this.sourceFile, nameArg); + + const columns: Record = {}; + const relations: string[] = []; + + if (columnsArg && ts.isObjectLiteralExpression(columnsArg)) { + for (const property of columnsArg.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const columnName = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + + const columnInfo = this.parseColumn(columnName, property.initializer); + columns[columnName] = columnInfo; + + if (columnInfo.references) { + relations.push(columnInfo.references); + } + } + } + + const indexes = this.parseIndexes(indexesArg); + + return { + name: tableName, + columns, + relations, + indexes, + }; + } + + private parseIndexes(indexesArg: ts.Expression | undefined): string[] { + if (!indexesArg) { + return []; + } + + const indexes: string[] = []; + const indexRoot = unwrapExpression(indexesArg); + + const collectFromObject = (obj: ts.ObjectLiteralExpression): void => { + for (const property of obj.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const value = unwrapExpression(property.initializer); + if (!ts.isCallExpression(value)) { + continue; + } + + const callName = getCallName(value); + if (callName === 'index' || callName === 'uniqueIndex') { + const key = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + indexes.push(key); + } + } + }; + + if (ts.isArrowFunction(indexRoot) || ts.isFunctionExpression(indexRoot)) { + const body = indexRoot.body; + if (ts.isObjectLiteralExpression(body)) { + collectFromObject(body); + } + + if (ts.isBlock(body)) { + for (const statement of body.statements) { + if (!ts.isReturnStatement(statement) || !statement.expression) { + continue; + } + + const expression = unwrapExpression(statement.expression); + if (ts.isObjectLiteralExpression(expression)) { + collectFromObject(expression); + } + } + } + } + + return indexes; + } + + private parseColumn(columnName: string, expression: ts.Expression): ColumnInfo { + let type = 'unknown'; + let nullable = true; + let unique = false; + let primaryKey = false; + let defaultValue: string | undefined; + let references: string | undefined; + + let current = unwrapExpression(expression); + + while (ts.isCallExpression(current)) { + const methodName = getCallName(current); + + if (methodName === 'text' || methodName === 'varchar' || methodName === 'char') { + type = 'text'; + } else if (methodName === 'integer' || methodName === 'int' || methodName === 'bigint' || methodName === 'serial') { + type = 'integer'; + } else if (methodName === 'real' || methodName === 'numeric' || methodName === 'decimal' || methodName === 'doublePrecision') { + type = 'number'; + } else if (methodName === 'boolean') { + type = 'boolean'; + } else if (methodName === 'timestamp' || methodName === 'datetime') { + type = 'datetime'; + } else if (methodName === 'json' || methodName === 'jsonb') { + type = 'json'; + } else if (methodName === 'blob') { + type = 'blob'; + } else if (methodName === 'notNull') { + nullable = false; + } else if (methodName === 'unique') { + unique = true; + } else if (methodName === 'primaryKey') { + primaryKey = true; + nullable = false; + } else if (methodName.startsWith('default')) { + defaultValue = 
getExpressionText(this.sourceFile, current.arguments[0]); + } else if (methodName === 'references') { + references = getExpressionText(this.sourceFile, current.arguments[0]); + } + + if (ts.isPropertyAccessExpression(current.expression)) { + current = unwrapExpression(current.expression.expression); + continue; + } + + break; + } + + return { + name: columnName, + type, + nullable, + unique, + primaryKey, + defaultValue, + references, + }; + } +} diff --git a/betterbase/packages/cli/test/scanner.test.ts b/betterbase/packages/cli/test/scanner.test.ts new file mode 100644 index 0000000..edf5594 --- /dev/null +++ b/betterbase/packages/cli/test/scanner.test.ts @@ -0,0 +1,61 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { SchemaScanner } from '../src/utils/scanner'; + +describe('SchemaScanner', () => { + test('extracts tables, columns, relations, and indexes from drizzle schema', () => { + const dir = mkdtempSync(path.join(tmpdir(), 'bb-scanner-')); + + try { + const schemaPath = path.join(dir, 'schema.ts'); + writeFileSync( + schemaPath, + ` + import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core'; + + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull().unique(), + age: integer('age').default(18), + }, (table) => ({ + usersEmailIdx: index('users_email_idx').on(table.email), + })); + + export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + title: text('title').notNull(), + }); + + export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + postId: text('post_id').notNull().references(() => posts.id), + body: text('body'), + }); + `, + ); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + 
expect(Object.keys(tables)).toEqual(['users', 'posts', 'comments']); + + expect(tables.users.name).toBe('users'); + expect(tables.users.columns.id.primaryKey).toBe(true); + expect(tables.users.columns.id.nullable).toBe(false); + expect(tables.users.columns.email.unique).toBe(true); + expect(tables.users.columns.age.defaultValue).toBe('18'); + expect(tables.users.indexes).toContain('usersEmailIdx'); + + expect(tables.posts.columns.userId.references).toBe('() => users.id'); + expect(tables.posts.relations).toContain('() => users.id'); + + expect(tables.comments.columns.postId.references).toBe('() => posts.id'); + expect(tables.comments.relations).toContain('() => posts.id'); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..cb9d36a 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,15 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + test('registers migrate commands', () => { const program = createProgram(); + const migrate = program.commands.find((command) => command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate 
Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 'local.db'; -const sqlite = new Database(dbPath, { create: true }); + +// env.DB_PATH is always present because env schema provides a default. +const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..359c1c5 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,5 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +7,8 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 
3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..5f2605b 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -10,11 +10,58 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const pagination = paginationSchema.parse({ + limit: c.req.query('limit') ?? undefined, + offset: c.req.query('offset') ?? undefined, + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..e86a10c 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,7 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true } } From 
4dca006aa27806b6dce955b0498721ca13393878 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 04:00:22 +0200 Subject: [PATCH 07/12] Add route/context scanners and dev context generation command --- betterbase/.gitignore | 6 +- betterbase/README.md | 10 + betterbase/apps/cli/tsconfig.json | 11 +- betterbase/package.json | 4 +- betterbase/packages/cli/package.json | 7 +- betterbase/packages/cli/src/build.ts | 18 +- betterbase/packages/cli/src/commands/dev.ts | 29 ++ betterbase/packages/cli/src/commands/init.ts | 66 +++- .../packages/cli/src/commands/migrate.ts | 356 +++++++++++++++++- betterbase/packages/cli/src/constants.ts | 1 + betterbase/packages/cli/src/index.ts | 31 +- .../cli/src/utils/context-generator.ts | 69 ++++ .../packages/cli/src/utils/route-scanner.ts | 161 ++++++++ betterbase/packages/cli/src/utils/scanner.ts | 251 ++++++++++++ .../packages/cli/src/utils/schema-scanner.ts | 2 + .../cli/test/context-generator.test.ts | 49 +++ .../packages/cli/test/route-scanner.test.ts | 45 +++ betterbase/packages/cli/test/scanner.test.ts | 61 +++ betterbase/packages/cli/test/smoke.test.ts | 15 +- betterbase/templates/base/README.md | 12 + betterbase/templates/base/package.json | 4 +- betterbase/templates/base/src/db/index.ts | 6 +- betterbase/templates/base/src/index.ts | 5 +- betterbase/templates/base/src/lib/env.ts | 3 + betterbase/templates/base/src/routes/index.ts | 5 +- betterbase/templates/base/src/routes/users.ts | 51 ++- betterbase/tsconfig.base.json | 3 +- 27 files changed, 1226 insertions(+), 55 deletions(-) create mode 100644 betterbase/packages/cli/src/commands/dev.ts create mode 100644 betterbase/packages/cli/src/constants.ts create mode 100644 betterbase/packages/cli/src/utils/context-generator.ts create mode 100644 betterbase/packages/cli/src/utils/route-scanner.ts create mode 100644 betterbase/packages/cli/src/utils/scanner.ts create mode 100644 betterbase/packages/cli/src/utils/schema-scanner.ts create mode 100644 
betterbase/packages/cli/test/context-generator.test.ts create mode 100644 betterbase/packages/cli/test/route-scanner.test.ts create mode 100644 betterbase/packages/cli/test/scanner.test.ts diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..9b75d8d 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,16 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..d9aaed2 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,16 @@ Initial BetterBase monorepo scaffold with a concrete base template. - Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..0f4c3d8 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git 
a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index 5b8543d..d4ba78a 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -16,11 +16,12 @@ "chalk": "^5.3.0", "commander": "^12.1.0", "inquirer": "^10.2.2", - "zod": "^3.23.8" + "zod": "^3.23.8", + "typescript": "^5.3.0", + "@typescript/vfs": "^1.5.0" }, "devDependencies": { - "@types/bun": "^1.3.9", - "typescript": "^5.9.3" + "@types/bun": "^1.3.9" }, "exports": { ".": "./src/index.ts" diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,16 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. */ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +23,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -26,11 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { 
console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts new file mode 100644 index 0000000..f801372 --- /dev/null +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -0,0 +1,29 @@ +import { watch } from 'node:fs'; +import path from 'node:path'; +import { ContextGenerator } from '../utils/context-generator'; + +export async function runDevCommand(projectRoot: string = process.cwd()): Promise { + const generator = new ContextGenerator(); + + await generator.generate(projectRoot); + + const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; + + for (const watchPath of watchPaths) { + watch(watchPath, { recursive: true }, async (_eventType, filename) => { + console.log(`šŸ“ File changed: ${String(filename ?? '')}`); + console.log('šŸ”„ Regenerating context...'); + + const start = Date.now(); + try { + await generator.generate(projectRoot); + console.log(`āœ… Context updated in ${Date.now() - start}ms`); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + console.error(`āŒ Failed to regenerate context: ${message}`); + } + }); + } + + console.log('šŸ‘€ Watching for schema and route changes...'); +} diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..ccc3741 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -427,7 +427,6 @@ bun.lockb .env.* !.env.example local.db -.drizzle `, ); @@ -447,7 +446,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 
'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -471,9 +470,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -510,9 +509,60 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..37573f8 
100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,358 @@ +import { Database } from 'bun:sqlite'; +import chalk from 'chalk'; +import { access, mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; -/** - * Run the `bb migrate` command. - */ +export type MigrationChangeType = + | 'create_table' + | 'add_column' + | 'modify_column' + | 'drop_column' + | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; + +async function runDrizzleKit(args: string[]): Promise { + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { + success: exitCode === 0, + stdout, + stderr, + }; +} + +async function listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + try { + await access(dir); + } catch { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); 
+ continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) { + continue; + } + + const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + if (createTable) { + changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + continue; + } + + const dropTable = sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + if (dropTable) { + changes.push({ type: 'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + if (addColumn) { + changes.push({ + type: 'add_column', + table: addColumn[1], + column: addColumn[2], + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: dropColumn[1], + column: dropColumn[2], + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + /alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: alterColumn[1], + column: alterColumn[3] ?? 
alterColumn[4], + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), + detail: sql, + }); + continue; + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length > 0) { + console.log(chalk.green('āœ… New Tables:')); + for (const change of newTables) { + console.log(chalk.green(` + ${change.table}`)); + } + console.log(''); + } + + if (newColumns.length > 0) { + console.log(chalk.green('āœ… New Columns:')); + for (const change of newColumns) { + console.log(chalk.green(` + ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (modified.length > 0) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + for (const change of modified) { + console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (destructive.length > 0) { + console.log(chalk.red('āŒ Destructive Changes:')); + for (const change of destructive) { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + } + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + if (destructive.length === 0) { + return true; + } + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + for (const change of destructive) { + console.log(` - ${change.type}: ${change.table}${change.column ? 
`.${change.column}` : ''}`); + } + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? DEFAULT_DB_PATH; + + try { + await access(sourcePath); + } catch { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + + const db = new Database(sourcePath, { readonly: true }); + try { + const snapshot = db.serialize(); + await Bun.write(backupPath, snapshot); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) { + return; + } + + const bytes = await Bun.file(backup.backupPath).bytes(); + const restoredDb = Database.deserialize(bytes); + + try { + await Bun.write(backup.sourcePath, restoredDb.serialize()); + } finally { + restoredDb.close(); + } + + logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*(?:\n|$)/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. 
Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) { + continue; + } + + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. 
(Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ + message: 'Apply migrations to production now?', + initial: false, + }); + + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + + const confirmed = await confirmDestructive(changes); + if (!confirmed) { + return; + } + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/connect|econn|database/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..8daff7f --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1 @@ +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..313ffb2 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -1,5 +1,6 @@ import { Command, CommanderError } from 'commander'; import { runInitCommand } from './commands/init'; +import { runDevCommand } from './commands/dev'; import { runMigrateCommand } from './commands/migrate'; import * as logger from './utils/logger'; import packageJson from '../package.json'; @@ -24,12 +25,34 @@ export function createProgram(): Command { await runInitCommand({ projectName }); }); + + program + 
.command('dev') + .description('Watch schema/routes and regenerate .betterbase-context.json') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runDevCommand(projectRoot); + }); + program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + .description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts new file mode 100644 index 0000000..f290c5f --- /dev/null +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -0,0 +1,69 @@ +import { writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { RouteScanner } from './route-scanner'; +import { SchemaScanner } from './schema-scanner'; + +export interface BetterBaseContext { + version: string; + generated_at: string; + tables: Record; + routes: Record; + ai_prompt: string; +} + +export class ContextGenerator { + async generate(projectRoot: string): Promise { + const schemaScanner = new SchemaScanner(path.join(projectRoot, 'src/db/schema.ts')); + const tables = schemaScanner.scan(); + + const routeScanner = new RouteScanner(); + const routes = await routeScanner.scan(path.join(projectRoot, 'src/routes')); + + const context: BetterBaseContext = { + 
version: '1.0.0', + generated_at: new Date().toISOString(), + tables, + routes, + ai_prompt: this.generateAIPrompt(tables, routes), + }; + + const outputPath = path.join(projectRoot, '.betterbase-context.json'); + writeFileSync(outputPath, `${JSON.stringify(context, null, 2)}\n`); + console.log(`āœ… Generated ${outputPath}`); + + return context; + } + + private generateAIPrompt(tables: Record, routes: Record): string { + const tableNames = Object.keys(tables); + const routeCount = Object.values(routes).reduce((count, methods) => count + (Array.isArray(methods) ? methods.length : 0), 0); + + let prompt = `This is a BetterBase backend project with ${tableNames.length} tables and ${routeCount} API endpoints.\n\n`; + + prompt += 'DATABASE SCHEMA:\n'; + for (const tableName of tableNames) { + const table = tables[tableName]; + const columns = Object.keys(table.columns ?? {}).join(', '); + prompt += `- ${tableName}: ${columns}\n`; + if (Array.isArray(table.relations) && table.relations.length > 0) { + prompt += ` Relations: ${table.relations.join(', ')}\n`; + } + } + + prompt += '\nAPI ENDPOINTS:\n'; + for (const [routePath, methods] of Object.entries(routes)) { + for (const route of methods as Array<{ method: string; requiresAuth: boolean }>) { + const auth = route.requiresAuth ? ' [AUTH REQUIRED]' : ''; + prompt += `- ${route.method} ${routePath}${auth}\n`; + } + } + + prompt += '\nWhen writing code for this project:\n'; + prompt += "1. Always import tables from './src/db/schema'\n"; + prompt += '2. Use Drizzle ORM for database queries\n'; + prompt += '3. Validate inputs with Zod\n'; + prompt += '4. 
Return JSON responses with proper status codes\n'; + + return prompt; + } +} diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts new file mode 100644 index 0000000..a8e6028 --- /dev/null +++ b/betterbase/packages/cli/src/utils/route-scanner.ts @@ -0,0 +1,161 @@ +import { readdirSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import * as ts from 'typescript'; + +export interface RouteInfo { + method: string; + path: string; + requiresAuth: boolean; + inputSchema?: string; + outputSchema?: string; +} + +function getStringLiteral(node: ts.Node | undefined): string { + if (!node) return ''; + if (ts.isStringLiteral(node) || ts.isNoSubstitutionTemplateLiteral(node)) { + return node.text; + } + return node.getText(); +} + +function collectTsFiles(dir: string): string[] { + const files: string[] = []; + + const walk = (current: string): void => { + let entries: ReturnType; + try { + entries = readdirSync(current, { withFileTypes: true }); + } catch { + return; + } + + for (const entry of entries) { + const fullPath = path.join(current, entry.name); + if (entry.isDirectory()) { + walk(fullPath); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.ts')) { + files.push(fullPath); + } + } + }; + + walk(dir); + return files; +} + +export class RouteScanner { + async scan(routesDir: string): Promise> { + const files = collectTsFiles(routesDir); + const routes: Record = {}; + + for (const file of files) { + const fileRoutes = this.scanFile(file); + for (const [routePath, entries] of Object.entries(fileRoutes)) { + routes[routePath] = [...(routes[routePath] ?? 
[]), ...entries]; + } + } + + return routes; + } + + private scanFile(filePath: string): Record { + const sourceCode = readFileSync(filePath, 'utf-8'); + const sourceFile = ts.createSourceFile(filePath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + + const routes: Record = {}; + const authIdentifiers = new Set(); + + const isAuthMiddlewareExpression = (expr: ts.Expression): boolean => { + if (ts.isIdentifier(expr)) { + return authIdentifiers.has(expr.text) || /auth/i.test(expr.text); + } + + if (ts.isPropertyAccessExpression(expr)) { + const text = expr.getText(sourceFile); + return /auth/i.test(text); + } + + return false; + }; + + const collectAuthIdentifiers = (node: ts.Node): void => { + if (!ts.isVariableStatement(node)) return; + + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) continue; + const initializer = declaration.initializer; + if (ts.isCallExpression(initializer) && ts.isIdentifier(initializer.expression)) { + if (initializer.expression.text === 'createMiddleware' || initializer.expression.text === 'requireAuth') { + authIdentifiers.add(declaration.name.text); + } + } + + if (/auth/i.test(declaration.name.text)) { + authIdentifiers.add(declaration.name.text); + } + } + }; + + ts.forEachChild(sourceFile, collectAuthIdentifiers); + + const visit = (node: ts.Node): void => { + if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) { + const method = node.expression.name.text.toLowerCase(); + const httpMethods = new Set(['get', 'post', 'put', 'patch', 'delete', 'options', 'head']); + + if (httpMethods.has(method)) { + const [pathArg, ...handlerArgs] = node.arguments; + const routePath = getStringLiteral(pathArg); + + let requiresAuth = false; + for (const arg of handlerArgs) { + if (isAuthMiddlewareExpression(arg)) { + requiresAuth = true; + break; + } + } + + const route: RouteInfo = { + method: method.toUpperCase(), + path: 
routePath, + requiresAuth, + inputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'input'), + outputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'output'), + }; + + if (!routes[routePath]) { + routes[routePath] = []; + } + + routes[routePath].push(route); + } + } + + ts.forEachChild(node, visit); + }; + + visit(sourceFile); + return routes; + } + + private findSchemaUsage(sourceFile: ts.SourceFile, args: ts.NodeArray, mode: 'input' | 'output'): string | undefined { + const text = args.map((arg) => arg.getText(sourceFile)).join('\n'); + + if (mode === 'input') { + const parseMatch = text.match(/([A-Za-z0-9_]+Schema)\.(safeParse|parse)\(/); + if (parseMatch) return parseMatch[1]; + const middlewareMatch = text.match(/parseBody\(([^,]+),/); + if (middlewareMatch) return middlewareMatch[1].trim(); + } + + if (mode === 'output') { + const outputMatch = text.match(/([A-Za-z0-9_]+Schema)\.(parse|safeParse)\([^)]*c\.json/); + if (outputMatch) return outputMatch[1]; + } + + return undefined; + } +} diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts new file mode 100644 index 0000000..2221507 --- /dev/null +++ b/betterbase/packages/cli/src/utils/scanner.ts @@ -0,0 +1,251 @@ +import { readFileSync } from 'node:fs'; +import * as ts from 'typescript'; + +export interface ColumnInfo { + name: string; + type: string; + nullable: boolean; + unique: boolean; + primaryKey: boolean; + defaultValue?: string; + references?: string; +} + +export interface TableInfo { + name: string; + columns: Record; + relations: string[]; + indexes: string[]; +} + +function unwrapExpression(expression: ts.Expression): ts.Expression { + let current = expression; + + while ( + ts.isParenthesizedExpression(current) || + ts.isAsExpression(current) || + ts.isTypeAssertionExpression(current) || + ts.isSatisfiesExpression(current) + ) { + if (ts.isParenthesizedExpression(current)) { + current = current.expression; + continue; + } + + if 
(ts.isAsExpression(current) || ts.isTypeAssertionExpression(current) || ts.isSatisfiesExpression(current)) { + current = current.expression; + continue; + } + } + + return current; +} + +function getCallName(call: ts.CallExpression): string { + if (ts.isIdentifier(call.expression)) { + return call.expression.text; + } + + if (ts.isPropertyAccessExpression(call.expression)) { + return call.expression.name.text; + } + + return ''; +} + +function getExpressionText(sourceFile: ts.SourceFile, node: ts.Node | undefined): string { + if (!node) { + return ''; + } + + return node.getText(sourceFile); +} + +export class SchemaScanner { + private readonly sourceFile: ts.SourceFile; + + constructor(schemaPath: string) { + const sourceCode = readFileSync(schemaPath, 'utf-8'); + this.sourceFile = ts.createSourceFile('schema.ts', sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + } + + scan(): Record { + const tables: Record = {}; + + const visit = (node: ts.Node): void => { + if (ts.isVariableStatement(node)) { + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) { + continue; + } + + const initializer = unwrapExpression(declaration.initializer); + if (!ts.isCallExpression(initializer)) { + continue; + } + + const functionName = getCallName(initializer); + if (functionName === 'sqliteTable' || functionName === 'pgTable' || functionName === 'mysqlTable') { + tables[declaration.name.text] = this.parseTable(initializer); + } + } + } + + ts.forEachChild(node, visit); + }; + + visit(this.sourceFile); + return tables; + } + + private parseTable(callExpression: ts.CallExpression): TableInfo { + const [nameArg, columnsArg, indexesArg] = callExpression.arguments; + const tableName = ts.isStringLiteral(nameArg) ? 
nameArg.text : getExpressionText(this.sourceFile, nameArg); + + const columns: Record = {}; + const relations: string[] = []; + + if (columnsArg && ts.isObjectLiteralExpression(columnsArg)) { + for (const property of columnsArg.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const columnName = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? property.name.text + : property.name.getText(this.sourceFile); + + const columnInfo = this.parseColumn(columnName, property.initializer); + columns[columnName] = columnInfo; + + if (columnInfo.references) { + relations.push(columnInfo.references); + } + } + } + + const indexes = this.parseIndexes(indexesArg); + + return { + name: tableName, + columns, + relations, + indexes, + }; + } + + private parseIndexes(indexesArg: ts.Expression | undefined): string[] { + if (!indexesArg) { + return []; + } + + const indexes: string[] = []; + const indexRoot = unwrapExpression(indexesArg); + + const collectFromObject = (obj: ts.ObjectLiteralExpression): void => { + for (const property of obj.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const value = unwrapExpression(property.initializer); + if (!ts.isCallExpression(value)) { + continue; + } + + const callName = getCallName(value); + if (callName === 'index' || callName === 'uniqueIndex') { + const key = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + indexes.push(key); + } + } + }; + + if (ts.isArrowFunction(indexRoot) || ts.isFunctionExpression(indexRoot)) { + const body = indexRoot.body; + if (ts.isObjectLiteralExpression(body)) { + collectFromObject(body); + } + + if (ts.isBlock(body)) { + for (const statement of body.statements) { + if (!ts.isReturnStatement(statement) || !statement.expression) { + continue; + } + + const expression = unwrapExpression(statement.expression); + if (ts.isObjectLiteralExpression(expression)) { + collectFromObject(expression); + } + } + } + } + + return indexes; + } + + private parseColumn(columnName: string, expression: ts.Expression): ColumnInfo { + let type = 'unknown'; + let nullable = true; + let unique = false; + let primaryKey = false; + let defaultValue: string | undefined; + let references: string | undefined; + + let current = unwrapExpression(expression); + + while (ts.isCallExpression(current)) { + const methodName = getCallName(current); + + if (methodName === 'text' || methodName === 'varchar' || methodName === 'char') { + type = 'text'; + } else if (methodName === 'integer' || methodName === 'int' || methodName === 'bigint' || methodName === 'serial') { + type = 'integer'; + } else if (methodName === 'real' || methodName === 'numeric' || methodName === 'decimal' || methodName === 'doublePrecision') { + type = 'number'; + } else if (methodName === 'boolean') { + type = 'boolean'; + } else if (methodName === 'timestamp' || methodName === 'datetime') { + type = 'datetime'; + } else if (methodName === 'json' || methodName === 'jsonb') { + type = 'json'; + } else if (methodName === 'blob') { + type = 'blob'; + } else if (methodName === 'notNull') { + nullable = false; + } else if (methodName === 'unique') { + unique = true; + } else if (methodName === 'primaryKey') { + primaryKey = true; + nullable = false; + } else if (methodName.startsWith('default')) { + defaultValue = 
getExpressionText(this.sourceFile, current.arguments[0]); + } else if (methodName === 'references') { + references = getExpressionText(this.sourceFile, current.arguments[0]); + } + + if (ts.isPropertyAccessExpression(current.expression)) { + current = unwrapExpression(current.expression.expression); + continue; + } + + break; + } + + return { + name: columnName, + type, + nullable, + unique, + primaryKey, + defaultValue, + references, + }; + } +} diff --git a/betterbase/packages/cli/src/utils/schema-scanner.ts b/betterbase/packages/cli/src/utils/schema-scanner.ts new file mode 100644 index 0000000..ff8ea30 --- /dev/null +++ b/betterbase/packages/cli/src/utils/schema-scanner.ts @@ -0,0 +1,2 @@ +export { SchemaScanner } from './scanner'; +export type { ColumnInfo, TableInfo } from './scanner'; diff --git a/betterbase/packages/cli/test/context-generator.test.ts b/betterbase/packages/cli/test/context-generator.test.ts new file mode 100644 index 0000000..cc300eb --- /dev/null +++ b/betterbase/packages/cli/test/context-generator.test.ts @@ -0,0 +1,49 @@ +import { mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { ContextGenerator } from '../src/utils/context-generator'; + +describe('ContextGenerator', () => { + test('creates .betterbase-context.json from schema and routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull(), + }); + `, + ); + + writeFileSync( + path.join(root, 'src/routes/index.ts'), + ` + import { Hono } from 
'hono'; + const app = new Hono(); + app.get('/health', (c) => c.json({ ok: true })); + export default app; + `, + ); + + const generator = new ContextGenerator(); + const context = await generator.generate(root); + + expect(context.tables.users).toBeDefined(); + expect(context.routes['/health']).toBeDefined(); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.tables.users.name).toBe('users'); + expect(file.routes['/health'][0].method).toBe('GET'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/route-scanner.test.ts b/betterbase/packages/cli/test/route-scanner.test.ts new file mode 100644 index 0000000..7307db8 --- /dev/null +++ b/betterbase/packages/cli/test/route-scanner.test.ts @@ -0,0 +1,45 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { RouteScanner } from '../src/utils/route-scanner'; + +describe('RouteScanner', () => { + test('extracts hono routes with auth and schemas', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-routes-')); + + try { + const routesDir = path.join(root, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + writeFileSync( + path.join(routesDir, 'users.ts'), + ` + import { Hono } from 'hono'; + import { z } from 'zod'; + import { authMiddleware } from '../middleware/auth'; + + const createUserSchema = z.object({ email: z.string().email() }); + export const users = new Hono(); + + users.get('/users', authMiddleware, (c) => c.json({ users: [] })); + users.post('/users', async (c) => { + const body = await c.req.json(); + createUserSchema.parse(body); + return c.json({ ok: true }); + }); + `, + ); + + const scanner = new RouteScanner(); + const routes = await scanner.scan(routesDir); + + expect(routes['/users']).toBeDefined(); + 
expect(routes['/users'].length).toBe(2); + expect(routes['/users'][0].requiresAuth).toBe(true); + expect(routes['/users'][1].inputSchema).toBe('createUserSchema'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/scanner.test.ts b/betterbase/packages/cli/test/scanner.test.ts new file mode 100644 index 0000000..edf5594 --- /dev/null +++ b/betterbase/packages/cli/test/scanner.test.ts @@ -0,0 +1,61 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { SchemaScanner } from '../src/utils/scanner'; + +describe('SchemaScanner', () => { + test('extracts tables, columns, relations, and indexes from drizzle schema', () => { + const dir = mkdtempSync(path.join(tmpdir(), 'bb-scanner-')); + + try { + const schemaPath = path.join(dir, 'schema.ts'); + writeFileSync( + schemaPath, + ` + import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core'; + + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull().unique(), + age: integer('age').default(18), + }, (table) => ({ + usersEmailIdx: index('users_email_idx').on(table.email), + })); + + export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + title: text('title').notNull(), + }); + + export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + postId: text('post_id').notNull().references(() => posts.id), + body: text('body'), + }); + `, + ); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + expect(Object.keys(tables)).toEqual(['users', 'posts', 'comments']); + + expect(tables.users.name).toBe('users'); + expect(tables.users.columns.id.primaryKey).toBe(true); + expect(tables.users.columns.id.nullable).toBe(false); + 
expect(tables.users.columns.email.unique).toBe(true); + expect(tables.users.columns.age.defaultValue).toBe('18'); + expect(tables.users.indexes).toContain('usersEmailIdx'); + + expect(tables.posts.columns.userId.references).toBe('() => users.id'); + expect(tables.posts.relations).toContain('() => users.id'); + + expect(tables.comments.columns.postId.references).toBe('() => posts.id'); + expect(tables.comments.relations).toContain('() => posts.id'); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..4e00aa3 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,22 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + + test('registers dev command', () => { + const program = createProgram(); + const dev = program.commands.find((command) => command.name() === 'dev'); + expect(dev).toBeDefined(); + }); + + test('registers migrate commands', () => { const program = createProgram(); + const migrate = program.commands.find((command) => command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- 
Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 'local.db'; -const sqlite = new Database(dbPath, { create: true }); + +// env.DB_PATH is always present because env schema provides a default. +const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..359c1c5 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,5 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +7,8 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 
3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..5f2605b 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -10,11 +10,58 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const pagination = paginationSchema.parse({ + limit: c.req.query('limit') ?? undefined, + offset: c.req.query('offset') ?? undefined, + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..e86a10c 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,7 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true } } From 
0c155514d4c08a8d90739b408f41f21d8e150576 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 04:23:21 +0200 Subject: [PATCH 08/12] Address review fixes for scanners, dev watch, and pagination --- betterbase/.gitignore | 1 + betterbase/README.md | 4 +- betterbase/packages/cli/package.json | 3 +- betterbase/packages/cli/src/commands/dev.ts | 50 +++++++++++----- betterbase/packages/cli/src/commands/init.ts | 24 ++++++-- .../packages/cli/src/commands/migrate.ts | 12 +--- .../cli/src/utils/context-generator.ts | 41 ++++++++----- .../packages/cli/src/utils/route-scanner.ts | 14 +++-- betterbase/packages/cli/src/utils/scanner.ts | 23 ++++---- .../cli/test/context-generator.test.ts | 58 +++++++++++++++++++ .../packages/cli/test/route-scanner.test.ts | 2 +- betterbase/templates/base/src/routes/users.ts | 47 +++++++++------ betterbase/tsconfig.base.json | 3 +- 13 files changed, 202 insertions(+), 80 deletions(-) diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 9b75d8d..240b1f4 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -7,6 +7,7 @@ dist *.sqlite *.db *.tsbuildinfo +.betterbase-context.json .vscode/ .idea/ diff --git a/betterbase/README.md b/betterbase/README.md index d9aaed2..ebc5905 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -27,7 +27,9 @@ From the monorepo root: - `bun install` - `bun run dev` - `bun run build` -- `bun run typecheck` +- `bun run typecheck` (runs `turbo run typecheck --filter '*'`) + +> Note: `templates/base` is not in the root workspace graph (`apps/*`, `packages/*`), so run template checks separately (e.g. `cd templates/base && bun run typecheck`). 
## Base Template Commands diff --git a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index d4ba78a..54f6fb8 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -17,8 +17,7 @@ "commander": "^12.1.0", "inquirer": "^10.2.2", "zod": "^3.23.8", - "typescript": "^5.3.0", - "@typescript/vfs": "^1.5.0" + "typescript": "^5.3.0" }, "devDependencies": { "@types/bun": "^1.3.9" diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts index f801372..3720944 100644 --- a/betterbase/packages/cli/src/commands/dev.ts +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -1,6 +1,7 @@ -import { watch } from 'node:fs'; +import { existsSync, watch } from 'node:fs'; import path from 'node:path'; import { ContextGenerator } from '../utils/context-generator'; +import * as logger from '../utils/logger'; export async function runDevCommand(projectRoot: string = process.cwd()): Promise { const generator = new ContextGenerator(); @@ -8,21 +9,42 @@ export async function runDevCommand(projectRoot: string = process.cwd()): Promis await generator.generate(projectRoot); const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; + const timers = new Map>(); for (const watchPath of watchPaths) { - watch(watchPath, { recursive: true }, async (_eventType, filename) => { - console.log(`šŸ“ File changed: ${String(filename ?? '')}`); - console.log('šŸ”„ Regenerating context...'); - - const start = Date.now(); - try { - await generator.generate(projectRoot); - console.log(`āœ… Context updated in ${Date.now() - start}ms`); - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - console.error(`āŒ Failed to regenerate context: ${message}`); - } - }); + if (!existsSync(watchPath)) { + logger.warn(`Watch path does not exist; skipping: ${watchPath}`); + continue; + } + + try { + watch(watchPath, { recursive: true }, (_eventType, filename) => { + console.log(`šŸ“ File changed: ${String(filename ?? '')}`); + + const existing = timers.get(watchPath); + if (existing) { + clearTimeout(existing); + } + + const timer = setTimeout(async () => { + console.log('šŸ”„ Regenerating context...'); + const start = Date.now(); + + try { + await generator.generate(projectRoot); + console.log(`āœ… Context updated in ${Date.now() - start}ms`); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āŒ Failed to regenerate context: ${message}`); + } + }, 250); + + timers.set(watchPath, timer); + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.warn(`Failed to watch path ${watchPath}: ${message}`); + } } console.log('šŸ‘€ Watching for schema and route changes...'); diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index ccc3741..abe5481 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -338,19 +338,20 @@ import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; import { healthRoute } from './health'; import { usersRoute } from './users'; +import { env } from '../lib/env'; -export default function registerRoutes(app: Hono): void { +export function registerRoutes(app: Hono): void { app.use('*', cors()); app.use('*', logger()); app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { 
error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? err.status : 500, @@ -374,6 +375,19 @@ async function writeProjectFiles( await mkdir(path.join(projectPath, 'src/middleware'), { recursive: true }); await mkdir(path.join(projectPath, 'src/lib'), { recursive: true }); + + await writeFile( + path.join(projectPath, 'src/lib/env.ts'), + `import { z } from 'zod'; + +const envSchema = z.object({ + NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), +}); + +export const env = envSchema.parse(process.env); +`, + ); + await writeFile( path.join(projectPath, 'betterbase.config.ts'), `export default { @@ -472,7 +486,7 @@ healthRoute.get('/', async (c) => { `import { HTTPException } from 'hono/http-exception'; import { z } from 'zod'; -export function parseBody(schema: S, body: unknown): z.output { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -595,7 +609,7 @@ usersRoute.post('/', async (c) => { await writeFile( path.join(projectPath, 'src/index.ts'), `import { Hono } from 'hono'; -import registerRoutes from './routes'; +import { registerRoutes } from './routes'; const app = new Hono(); registerRoutes(app); diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 37573f8..8999867 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -257,20 +257,14 @@ async function restoreBackup(backup: MigrationBackup | null): Promise { } const bytes = await Bun.file(backup.backupPath).bytes(); - const restoredDb = Database.deserialize(bytes); - - try { - await Bun.write(backup.sourcePath, restoredDb.serialize()); - } 
finally { - restoredDb.close(); - } + await Bun.write(backup.sourcePath, bytes); logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); } function splitStatements(sql: string): string[] { return sql - .split(/;\s*(?:\n|$)/g) + .split(/;\s*/g) .map((statement) => statement.trim()) .filter((statement) => statement.length > 0); } @@ -343,7 +337,7 @@ export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Prom if (!push.success) { await restoreBackup(backup); - if (/connect|econn|database/i.test(push.stderr)) { + if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); } diff --git a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts index f290c5f..aabf0f4 100644 --- a/betterbase/packages/cli/src/utils/context-generator.ts +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -1,23 +1,38 @@ -import { writeFileSync } from 'node:fs'; +import { existsSync, writeFileSync } from 'node:fs'; import path from 'node:path'; -import { RouteScanner } from './route-scanner'; -import { SchemaScanner } from './schema-scanner'; +import { RouteScanner, type RouteInfo } from './route-scanner'; +import { SchemaScanner, type TableInfo } from './schema-scanner'; +import * as logger from './logger'; export interface BetterBaseContext { version: string; generated_at: string; - tables: Record; - routes: Record; + tables: Record; + routes: Record; ai_prompt: string; } export class ContextGenerator { async generate(projectRoot: string): Promise { - const schemaScanner = new SchemaScanner(path.join(projectRoot, 'src/db/schema.ts')); - const tables = schemaScanner.scan(); + const schemaPath = path.join(projectRoot, 'src/db/schema.ts'); + const routesPath = path.join(projectRoot, 'src/routes'); - const routeScanner = new RouteScanner(); - const routes = await 
routeScanner.scan(path.join(projectRoot, 'src/routes')); + let tables: Record = {}; + let routes: Record = {}; + + if (existsSync(schemaPath)) { + const schemaScanner = new SchemaScanner(schemaPath); + tables = schemaScanner.scan(); + } else { + logger.warn(`Schema file not found; continuing with empty tables: ${schemaPath}`); + } + + if (existsSync(routesPath)) { + const routeScanner = new RouteScanner(); + routes = routeScanner.scan(routesPath); + } else { + logger.warn(`Routes directory not found; continuing with empty routes: ${routesPath}`); + } const context: BetterBaseContext = { version: '1.0.0', @@ -34,9 +49,9 @@ export class ContextGenerator { return context; } - private generateAIPrompt(tables: Record, routes: Record): string { + private generateAIPrompt(tables: Record, routes: Record): string { const tableNames = Object.keys(tables); - const routeCount = Object.values(routes).reduce((count, methods) => count + (Array.isArray(methods) ? methods.length : 0), 0); + const routeCount = Object.values(routes).reduce((count, methods) => count + methods.length, 0); let prompt = `This is a BetterBase backend project with ${tableNames.length} tables and ${routeCount} API endpoints.\n\n`; @@ -45,14 +60,14 @@ export class ContextGenerator { const table = tables[tableName]; const columns = Object.keys(table.columns ?? {}).join(', '); prompt += `- ${tableName}: ${columns}\n`; - if (Array.isArray(table.relations) && table.relations.length > 0) { + if (table.relations.length > 0) { prompt += ` Relations: ${table.relations.join(', ')}\n`; } } prompt += '\nAPI ENDPOINTS:\n'; for (const [routePath, methods] of Object.entries(routes)) { - for (const route of methods as Array<{ method: string; requiresAuth: boolean }>) { + for (const route of methods) { const auth = route.requiresAuth ? 
' [AUTH REQUIRED]' : ''; prompt += `- ${route.method} ${routePath}${auth}\n`; } diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts index a8e6028..5fc7aa9 100644 --- a/betterbase/packages/cli/src/utils/route-scanner.ts +++ b/betterbase/packages/cli/src/utils/route-scanner.ts @@ -18,6 +18,10 @@ function getStringLiteral(node: ts.Node | undefined): string { return node.getText(); } +function isAuthLikeName(value: string): boolean { + return /\bauth\b/i.test(value) || /^auth/i.test(value) || /^(authMiddleware|requireAuth)$/i.test(value); +} + function collectTsFiles(dir: string): string[] { const files: string[] = []; @@ -36,7 +40,7 @@ function collectTsFiles(dir: string): string[] { continue; } - if (entry.isFile() && entry.name.endsWith('.ts')) { + if (entry.isFile() && entry.name.endsWith('.ts') && !entry.name.endsWith('.d.ts')) { files.push(fullPath); } } @@ -47,7 +51,7 @@ function collectTsFiles(dir: string): string[] { } export class RouteScanner { - async scan(routesDir: string): Promise> { + scan(routesDir: string): Record { const files = collectTsFiles(routesDir); const routes: Record = {}; @@ -70,12 +74,12 @@ export class RouteScanner { const isAuthMiddlewareExpression = (expr: ts.Expression): boolean => { if (ts.isIdentifier(expr)) { - return authIdentifiers.has(expr.text) || /auth/i.test(expr.text); + return authIdentifiers.has(expr.text) || isAuthLikeName(expr.text); } if (ts.isPropertyAccessExpression(expr)) { const text = expr.getText(sourceFile); - return /auth/i.test(text); + return isAuthLikeName(text); } return false; @@ -93,7 +97,7 @@ export class RouteScanner { } } - if (/auth/i.test(declaration.name.text)) { + if (isAuthLikeName(declaration.name.text)) { authIdentifiers.add(declaration.name.text); } } diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts index 2221507..0dee86d 100644 --- 
a/betterbase/packages/cli/src/utils/scanner.ts +++ b/betterbase/packages/cli/src/utils/scanner.ts @@ -27,15 +27,8 @@ function unwrapExpression(expression: ts.Expression): ts.Expression { ts.isTypeAssertionExpression(current) || ts.isSatisfiesExpression(current) ) { - if (ts.isParenthesizedExpression(current)) { - current = current.expression; - continue; - } - - if (ts.isAsExpression(current) || ts.isTypeAssertionExpression(current) || ts.isSatisfiesExpression(current)) { - current = current.expression; - continue; - } + current = (current as ts.ParenthesizedExpression | ts.AsExpression | ts.TypeAssertion | ts.SatisfiesExpression) + .expression; } return current; @@ -65,8 +58,16 @@ export class SchemaScanner { private readonly sourceFile: ts.SourceFile; constructor(schemaPath: string) { - const sourceCode = readFileSync(schemaPath, 'utf-8'); - this.sourceFile = ts.createSourceFile('schema.ts', sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + let sourceCode: string; + + try { + sourceCode = readFileSync(schemaPath, 'utf-8'); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Failed to read schema file at ${schemaPath}: ${message}`); + } + + this.sourceFile = ts.createSourceFile(schemaPath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); } scan(): Record { diff --git a/betterbase/packages/cli/test/context-generator.test.ts b/betterbase/packages/cli/test/context-generator.test.ts index cc300eb..2499430 100644 --- a/betterbase/packages/cli/test/context-generator.test.ts +++ b/betterbase/packages/cli/test/context-generator.test.ts @@ -37,13 +37,71 @@ describe('ContextGenerator', () => { const context = await generator.generate(root); expect(context.tables.users).toBeDefined(); + expect(context.tables.users.columns.id).toBeDefined(); + expect(context.tables.users.columns.email).toBeDefined(); expect(context.routes['/health']).toBeDefined(); const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); expect(file.tables.users.name).toBe('users'); + expect(file.tables.users.columns.id.type).toBe('text'); + expect(file.tables.users.columns.email.type).toBe('text'); + expect(Array.isArray(file.routes['/health'])).toBe(true); + expect(file.routes['/health'].length).toBeGreaterThan(0); expect(file.routes['/health'][0].method).toBe('GET'); } finally { rmSync(root, { recursive: true, force: true }); } }); + + test('handles missing routes directory with empty routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-routes-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { id: text('id').primaryKey() }); + `, + ); + + const context = await new ContextGenerator().generate(root); + expect(context.routes).toEqual({}); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + 
expect(file.routes).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles empty schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-empty-schema-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 'src/db/schema.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-schema-')); + + try { + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 'src/routes/index.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); }); diff --git a/betterbase/packages/cli/test/route-scanner.test.ts b/betterbase/packages/cli/test/route-scanner.test.ts index 7307db8..9f56991 100644 --- a/betterbase/packages/cli/test/route-scanner.test.ts +++ b/betterbase/packages/cli/test/route-scanner.test.ts @@ -32,7 +32,7 @@ describe('RouteScanner', () => { ); const scanner = new RouteScanner(); - const routes = await scanner.scan(routesDir); + const routes = scanner.scan(routesDir); expect(routes['/users']).toBeDefined(); expect(routes['/users'].length).toBe(2); diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 5f2605b..22fff3d 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; import { HTTPException } from 'hono/http-exception'; -import { z } from 'zod'; 
+import { z, ZodError } from 'zod'; import { db } from '../db'; import { users } from '../db/schema'; import { parseBody } from '../middleware/validation'; @@ -22,26 +22,27 @@ const paginationSchema = z.object({ export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const pagination = paginationSchema.parse({ - limit: c.req.query('limit') ?? undefined, - offset: c.req.query('offset') ?? undefined, - }); + try { + const pagination = paginationSchema.parse({ + limit: c.req.query('limit'), + offset: c.req.query('offset'), + }); - const limit = Math.min(pagination.limit, MAX_LIMIT); - const offset = pagination.offset; + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; - if (limit === 0) { - return c.json({ - users: [], - pagination: { - limit, - offset, - hasMore: false, - }, - }); - } + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + // No DB query is run for limit=0, so hasMore cannot be determined. 
+ hasMore: null, + }, + }); + } - try { const rows = await db.select().from(users).limit(limit + 1).offset(offset); const hasMore = rows.length > limit; const paginatedUsers = rows.slice(0, limit); @@ -59,6 +60,16 @@ usersRoute.get('/', async (c) => { throw error; } + if (error instanceof ZodError) { + return c.json( + { + error: 'Invalid pagination query parameters', + details: error.issues, + }, + 400, + ); + } + console.error('Failed to fetch users:', error); throw error; } diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index e86a10c..574a5e4 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -9,6 +9,7 @@ "resolveJsonModule": true, "isolatedModules": true, "forceConsistentCasingInFileNames": true, - "declaration": true + "declaration": true, + "declarationDir": "dist/types" } } From fc5278f7b39bc094490a01548fe68f6dea0a8a9f Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 04:35:39 +0200 Subject: [PATCH 09/12] Add bb auth setup command with BetterAuth scaffolding --- betterbase/.gitignore | 7 +- betterbase/README.md | 12 + betterbase/apps/cli/tsconfig.json | 11 +- betterbase/package.json | 4 +- betterbase/packages/cli/package.json | 6 +- betterbase/packages/cli/src/build.ts | 18 +- betterbase/packages/cli/src/commands/auth.ts | 238 ++++++++++++ betterbase/packages/cli/src/commands/dev.ts | 51 +++ betterbase/packages/cli/src/commands/init.ts | 88 ++++- .../packages/cli/src/commands/migrate.ts | 350 +++++++++++++++++- betterbase/packages/cli/src/constants.ts | 1 + betterbase/packages/cli/src/index.ts | 43 ++- .../cli/src/utils/context-generator.ts | 84 +++++ .../packages/cli/src/utils/route-scanner.ts | 165 +++++++++ betterbase/packages/cli/src/utils/scanner.ts | 252 +++++++++++++ .../packages/cli/src/utils/schema-scanner.ts | 2 + .../cli/test/context-generator.test.ts | 107 ++++++ .../packages/cli/test/route-scanner.test.ts | 45 +++ betterbase/packages/cli/test/scanner.test.ts | 61 +++ 
betterbase/packages/cli/test/smoke.test.ts | 25 +- betterbase/templates/base/README.md | 12 + betterbase/templates/base/package.json | 4 +- betterbase/templates/base/src/db/index.ts | 6 +- betterbase/templates/base/src/index.ts | 5 +- betterbase/templates/base/src/lib/env.ts | 3 + betterbase/templates/base/src/routes/index.ts | 5 +- betterbase/templates/base/src/routes/users.ts | 64 +++- betterbase/tsconfig.base.json | 4 +- 28 files changed, 1613 insertions(+), 60 deletions(-) create mode 100644 betterbase/packages/cli/src/commands/auth.ts create mode 100644 betterbase/packages/cli/src/commands/dev.ts create mode 100644 betterbase/packages/cli/src/constants.ts create mode 100644 betterbase/packages/cli/src/utils/context-generator.ts create mode 100644 betterbase/packages/cli/src/utils/route-scanner.ts create mode 100644 betterbase/packages/cli/src/utils/scanner.ts create mode 100644 betterbase/packages/cli/src/utils/schema-scanner.ts create mode 100644 betterbase/packages/cli/test/context-generator.test.ts create mode 100644 betterbase/packages/cli/test/route-scanner.test.ts create mode 100644 betterbase/packages/cli/test/scanner.test.ts diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..240b1f4 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,17 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo +.betterbase-context.json .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..ebc5905 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,18 @@ Initial BetterBase monorepo scaffold with a concrete base template. 
- Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` (runs `turbo run typecheck --filter '*'`) + +> Note: `templates/base` is not in the root workspace graph (`apps/*`, `packages/*`), so run template checks separately (e.g. `cd templates/base && bun run typecheck`). + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..0f4c3d8 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index 5b8543d..54f6fb8 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -16,11 +16,11 @@ "chalk": "^5.3.0", "commander": "^12.1.0", "inquirer": "^10.2.2", - "zod": "^3.23.8" + "zod": "^3.23.8", + "typescript": "^5.3.0" }, "devDependencies": { - "@types/bun": "^1.3.9", - "typescript": "^5.9.3" + "@types/bun": "^1.3.9" }, "exports": { ".": "./src/index.ts" diff --git a/betterbase/packages/cli/src/build.ts 
b/betterbase/packages/cli/src/build.ts index 198205e..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,16 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. */ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +23,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -26,11 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/auth.ts b/betterbase/packages/cli/src/commands/auth.ts new file mode 100644 index 0000000..3dbc76e --- /dev/null +++ b/betterbase/packages/cli/src/commands/auth.ts @@ -0,0 +1,238 @@ +import { execSync } from 'node:child_process'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import * as logger from '../utils/logger'; + +const AUTH_SCHEMA_BLOCK = ` +// Auth tables (generated by BetterAuth) 
+export const sessions = sqliteTable('sessions', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + expiresAt: integer('expires_at', { mode: 'timestamp' }).notNull(), + ipAddress: text('ip_address'), + userAgent: text('user_agent'), +}); + +export const accounts = sqliteTable('accounts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + provider: text('provider').notNull(), + providerAccountId: text('provider_account_id').notNull(), + accessToken: text('access_token'), + refreshToken: text('refresh_token'), + expiresAt: integer('expires_at', { mode: 'timestamp' }), +}); +`; + +const AUTH_ROUTE_FILE = `import { Hono } from 'hono'; +import { z } from 'zod'; +import { eq } from 'drizzle-orm'; +import { db } from '../db'; +import { users, sessions } from '../db/schema'; + +const authRoute = new Hono(); + +const loginSchema = z.object({ + email: z.string().email(), + password: z.string().min(8), +}); + +authRoute.post('/login', async (c) => { + const body = loginSchema.parse(await c.req.json()); + + const user = await db.select().from(users).where(eq(users.email, body.email)).limit(1); + if (user.length === 0) { + return c.json({ error: 'Invalid credentials' }, 401); + } + + const sessionId = crypto.randomUUID(); + const expiresAt = new Date(Date.now() + 30 * 24 * 60 * 60 * 1000); + + await db.insert(sessions).values({ + id: sessionId, + userId: user[0].id, + expiresAt, + ipAddress: c.req.header('cf-connecting-ip') || c.req.header('x-forwarded-for') || null, + userAgent: c.req.header('user-agent') || null, + }); + + return c.json({ + token: sessionId, + user: { + id: user[0].id, + email: user[0].email, + name: user[0].name, + }, + }); +}); + +authRoute.post('/logout', async (c) => { + const token = c.req.header('Authorization')?.split(' ')[1]; + if (token) { + await db.delete(sessions).where(eq(sessions.id, token)); + } + + return c.json({ message: 'Logged out' }); +}); + 
+export { authRoute }; +`; + +const AUTH_MIDDLEWARE_FILE = `import { and, eq, gt } from 'drizzle-orm'; +import { createMiddleware } from 'hono/factory'; +import { db } from '../db'; +import { sessions, users } from '../db/schema'; + +export interface AuthContext { + user: { + id: string; + email: string; + name: string | null; + }; +} + +function getSessionToken(authHeader: string | undefined): string | null { + if (!authHeader) return null; + + const parts = authHeader.split(' '); + if (parts.length !== 2 || parts[0] !== 'Bearer') return null; + + return parts[1]; +} + +async function validateSession(token: string): Promise { + const session = await db + .select() + .from(sessions) + .where(and(eq(sessions.id, token), gt(sessions.expiresAt, new Date()))) + .limit(1); + + if (session.length === 0) return null; + + const user = await db.select().from(users).where(eq(users.id, session[0].userId)).limit(1); + return user.length > 0 ? user[0] : null; +} + +export const requireAuth = createMiddleware<{ Variables: AuthContext }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (!token) { + return c.json({ error: 'Unauthorized: No token provided' }, 401); + } + + const user = await validateSession(token); + if (!user) { + return c.json({ error: 'Unauthorized: Invalid or expired token' }, 401); + } + + c.set('user', user); + await next(); +}); + +export const optionalAuth = createMiddleware<{ Variables: Partial }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (token) { + const user = await validateSession(token); + if (user) { + c.set('user', user); + } + } + + await next(); +}); + +export function getUser(c: { get: (key: 'user') => AuthContext['user'] }): AuthContext['user'] { + return c.get('user'); +} +`; + +function appendIfMissing(filePath: string, marker: string, content: string): void { + const current = readFileSync(filePath, 'utf-8'); + if (current.includes(marker)) { + 
return; + } + + const next = current.trimEnd() + '\n\n' + content.trim() + '\n'; + writeFileSync(filePath, next); +} + +function ensureAuthInConfig(projectRoot: string): void { + const configPath = path.join(projectRoot, 'betterbase.config.ts'); + if (!existsSync(configPath)) return; + + const current = readFileSync(configPath, 'utf-8'); + if (current.includes('auth: {')) return; + + const updated = current.replace( + 'export default {', + `export default {\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + ); + + writeFileSync(configPath, updated); +} + +function ensureEnvVar(projectRoot: string): void { + const envPath = path.join(projectRoot, '.env.example'); + if (!existsSync(envPath)) return; + + const env = readFileSync(envPath, 'utf-8'); + if (env.includes('AUTH_SECRET=')) return; + + writeFileSync(envPath, `${env.trimEnd()}\n\n# Auth\nAUTH_SECRET=your-secret-key-here-change-in-production\n`); +} + +function ensureRoutesIndexHook(projectRoot: string): void { + const routesIndexPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routesIndexPath)) return; + + let current = readFileSync(routesIndexPath, 'utf-8'); + + if (!current.includes("import { authRoute } from './auth';")) { + current = current.replace("import { usersRoute } from './users';", "import { usersRoute } from './users';\nimport { authRoute } from './auth';"); + } + + if (!current.includes("app.route('/auth', authRoute);")) { + current = current.replace("app.route('/api/users', usersRoute);", "app.route('/api/users', usersRoute);\n app.route('/auth', authRoute);"); + } + + writeFileSync(routesIndexPath, current); +} + +export async function runAuthSetupCommand(projectRoot: string = process.cwd()): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + const middlewarePath = path.join(resolvedRoot, 'src/middleware/auth.ts'); + const routePath = 
path.join(resolvedRoot, 'src/routes/auth.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Could not find schema file at ${schemaPath}. Run this command from a BetterBase app root.`); + } + + logger.info('šŸ” Setting up authentication...'); + + logger.info('šŸ“¦ Installing better-auth...'); + execSync('bun add better-auth', { cwd: resolvedRoot, stdio: 'inherit' }); + + logger.info('šŸ“ Adding auth tables to schema...'); + appendIfMissing(schemaPath, "export const sessions = sqliteTable('sessions'", AUTH_SCHEMA_BLOCK); + + logger.info('šŸ›”ļø Creating auth middleware...'); + mkdirSync(path.dirname(middlewarePath), { recursive: true }); + writeFileSync(middlewarePath, AUTH_MIDDLEWARE_FILE); + + logger.info('🧭 Creating auth routes...'); + mkdirSync(path.dirname(routePath), { recursive: true }); + writeFileSync(routePath, AUTH_ROUTE_FILE); + ensureRoutesIndexHook(resolvedRoot); + + logger.info('āš™ļø Updating config...'); + ensureAuthInConfig(resolvedRoot); + ensureEnvVar(resolvedRoot); + + logger.success('Authentication setup complete!'); + console.log('\nNext steps:'); + console.log(' 1. Set AUTH_SECRET in .env'); + console.log(' 2. Run: bun run db:push'); + console.log(' 3. 
Use requireAuth middleware on protected routes'); +} diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts new file mode 100644 index 0000000..3720944 --- /dev/null +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -0,0 +1,51 @@ +import { existsSync, watch } from 'node:fs'; +import path from 'node:path'; +import { ContextGenerator } from '../utils/context-generator'; +import * as logger from '../utils/logger'; + +export async function runDevCommand(projectRoot: string = process.cwd()): Promise { + const generator = new ContextGenerator(); + + await generator.generate(projectRoot); + + const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; + const timers = new Map>(); + + for (const watchPath of watchPaths) { + if (!existsSync(watchPath)) { + logger.warn(`Watch path does not exist; skipping: ${watchPath}`); + continue; + } + + try { + watch(watchPath, { recursive: true }, (_eventType, filename) => { + console.log(`šŸ“ File changed: ${String(filename ?? '')}`); + + const existing = timers.get(watchPath); + if (existing) { + clearTimeout(existing); + } + + const timer = setTimeout(async () => { + console.log('šŸ”„ Regenerating context...'); + const start = Date.now(); + + try { + await generator.generate(projectRoot); + console.log(`āœ… Context updated in ${Date.now() - start}ms`); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āŒ Failed to regenerate context: ${message}`); + } + }, 250); + + timers.set(watchPath, timer); + }); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logger.warn(`Failed to watch path ${watchPath}: ${message}`); + } + } + + console.log('šŸ‘€ Watching for schema and route changes...'); +} diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..abe5481 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -338,19 +338,20 @@ import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; import { healthRoute } from './health'; import { usersRoute } from './users'; +import { env } from '../lib/env'; -export default function registerRoutes(app: Hono): void { +export function registerRoutes(app: Hono): void { app.use('*', cors()); app.use('*', logger()); app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, @@ -374,6 +375,19 @@ async function writeProjectFiles( await mkdir(path.join(projectPath, 'src/middleware'), { recursive: true }); await mkdir(path.join(projectPath, 'src/lib'), { recursive: true }); + + await writeFile( + path.join(projectPath, 'src/lib/env.ts'), + `import { z } from 'zod'; + +const envSchema = z.object({ + NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), +}); + +export const env = envSchema.parse(process.env); +`, + ); + await writeFile( path.join(projectPath, 'betterbase.config.ts'), `export default { @@ -427,7 +441,6 @@ bun.lockb .env.* !.env.example local.db -.drizzle `, ); @@ -447,7 +460,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -471,9 +484,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -510,9 +523,60 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = 
Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { @@ -545,7 +609,7 @@ usersRoute.post('/', async (c) => { await writeFile( path.join(projectPath, 'src/index.ts'), `import { Hono } from 'hono'; -import registerRoutes from './routes'; +import { registerRoutes } from './routes'; const app = new Hono(); registerRoutes(app); diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..8999867 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,352 @@ +import { Database } from 'bun:sqlite'; +import chalk from 'chalk'; +import { access, mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; -/** - * Run the `bb migrate` command. 
- */ +export type MigrationChangeType = + | 'create_table' + | 'add_column' + | 'modify_column' + | 'drop_column' + | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; + +async function runDrizzleKit(args: string[]): Promise { + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { + success: exitCode === 0, + stdout, + stderr, + }; +} + +async function listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + try { + await access(dir); + } catch { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); + continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) { + continue; + } + + const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + if (createTable) { + changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + continue; + } + + const dropTable = 
sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + if (dropTable) { + changes.push({ type: 'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + if (addColumn) { + changes.push({ + type: 'add_column', + table: addColumn[1], + column: addColumn[2], + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: dropColumn[1], + column: dropColumn[2], + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + /alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: alterColumn[1], + column: alterColumn[3] ?? alterColumn[4], + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), + detail: sql, + }); + continue; + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length > 0) { + console.log(chalk.green('āœ… New Tables:')); + for (const change of newTables) { + console.log(chalk.green(` + ${change.table}`)); + } + console.log(''); + } + + if (newColumns.length > 0) { + console.log(chalk.green('āœ… New Columns:')); + for (const change of newColumns) { + console.log(chalk.green(` + ${change.table}.${change.column 
?? ''}`)); + } + console.log(''); + } + + if (modified.length > 0) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + for (const change of modified) { + console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (destructive.length > 0) { + console.log(chalk.red('āŒ Destructive Changes:')); + for (const change of destructive) { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + } + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + if (destructive.length === 0) { + return true; + } + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + for (const change of destructive) { + console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`); + } + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; + + try { + await access(sourcePath); + } catch { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + + const db = new Database(sourcePath, { readonly: true }); + try { + const snapshot = db.serialize(); + await Bun.write(backupPath, snapshot); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) { + return; + } + + const bytes = await Bun.file(backup.backupPath).bytes(); + await Bun.write(backup.sourcePath, bytes); + + logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. 
Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) { + continue; + } + + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. 
(Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ + message: 'Apply migrations to production now?', + initial: false, + }); + + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + + const confirmed = await confirmDestructive(changes); + if (!confirmed) { + return; + } + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..8daff7f --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1 @@ +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..3e6a812 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -1,6 +1,8 @@ import { Command, CommanderError } from 'commander'; import { runInitCommand } from './commands/init'; +import { runDevCommand } from './commands/dev'; import { runMigrateCommand } from './commands/migrate'; +import { runAuthSetupCommand } from './commands/auth'; import * as logger from './utils/logger'; import packageJson from '../package.json'; @@ -24,12 +26,45 @@ export 
function createProgram(): Command { await runInitCommand({ projectName }); }); + + program + .command('dev') + .description('Watch schema/routes and regenerate .betterbase-context.json') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runDevCommand(projectRoot); + }); + + + const auth = program.command('auth').description('Authentication helpers'); + + auth + .command('setup') + .description('Install and scaffold BetterAuth integration') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runAuthSetupCommand(projectRoot); + }); + program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + .description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts new file mode 100644 index 0000000..aabf0f4 --- /dev/null +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -0,0 +1,84 @@ +import { existsSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { RouteScanner, type RouteInfo } from './route-scanner'; +import { SchemaScanner, type TableInfo } from './schema-scanner'; +import * as logger from './logger'; + +export 
interface BetterBaseContext { + version: string; + generated_at: string; + tables: Record; + routes: Record; + ai_prompt: string; +} + +export class ContextGenerator { + async generate(projectRoot: string): Promise { + const schemaPath = path.join(projectRoot, 'src/db/schema.ts'); + const routesPath = path.join(projectRoot, 'src/routes'); + + let tables: Record = {}; + let routes: Record = {}; + + if (existsSync(schemaPath)) { + const schemaScanner = new SchemaScanner(schemaPath); + tables = schemaScanner.scan(); + } else { + logger.warn(`Schema file not found; continuing with empty tables: ${schemaPath}`); + } + + if (existsSync(routesPath)) { + const routeScanner = new RouteScanner(); + routes = routeScanner.scan(routesPath); + } else { + logger.warn(`Routes directory not found; continuing with empty routes: ${routesPath}`); + } + + const context: BetterBaseContext = { + version: '1.0.0', + generated_at: new Date().toISOString(), + tables, + routes, + ai_prompt: this.generateAIPrompt(tables, routes), + }; + + const outputPath = path.join(projectRoot, '.betterbase-context.json'); + writeFileSync(outputPath, `${JSON.stringify(context, null, 2)}\n`); + console.log(`āœ… Generated ${outputPath}`); + + return context; + } + + private generateAIPrompt(tables: Record, routes: Record): string { + const tableNames = Object.keys(tables); + const routeCount = Object.values(routes).reduce((count, methods) => count + methods.length, 0); + + let prompt = `This is a BetterBase backend project with ${tableNames.length} tables and ${routeCount} API endpoints.\n\n`; + + prompt += 'DATABASE SCHEMA:\n'; + for (const tableName of tableNames) { + const table = tables[tableName]; + const columns = Object.keys(table.columns ?? 
{}).join(', '); + prompt += `- ${tableName}: ${columns}\n`; + if (table.relations.length > 0) { + prompt += ` Relations: ${table.relations.join(', ')}\n`; + } + } + + prompt += '\nAPI ENDPOINTS:\n'; + for (const [routePath, methods] of Object.entries(routes)) { + for (const route of methods) { + const auth = route.requiresAuth ? ' [AUTH REQUIRED]' : ''; + prompt += `- ${route.method} ${routePath}${auth}\n`; + } + } + + prompt += '\nWhen writing code for this project:\n'; + prompt += "1. Always import tables from './src/db/schema'\n"; + prompt += '2. Use Drizzle ORM for database queries\n'; + prompt += '3. Validate inputs with Zod\n'; + prompt += '4. Return JSON responses with proper status codes\n'; + + return prompt; + } +} diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts new file mode 100644 index 0000000..5fc7aa9 --- /dev/null +++ b/betterbase/packages/cli/src/utils/route-scanner.ts @@ -0,0 +1,165 @@ +import { readdirSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import * as ts from 'typescript'; + +export interface RouteInfo { + method: string; + path: string; + requiresAuth: boolean; + inputSchema?: string; + outputSchema?: string; +} + +function getStringLiteral(node: ts.Node | undefined): string { + if (!node) return ''; + if (ts.isStringLiteral(node) || ts.isNoSubstitutionTemplateLiteral(node)) { + return node.text; + } + return node.getText(); +} + +function isAuthLikeName(value: string): boolean { + return /\bauth\b/i.test(value) || /^auth/i.test(value) || /^(authMiddleware|requireAuth)$/i.test(value); +} + +function collectTsFiles(dir: string): string[] { + const files: string[] = []; + + const walk = (current: string): void => { + let entries: ReturnType; + try { + entries = readdirSync(current, { withFileTypes: true }); + } catch { + return; + } + + for (const entry of entries) { + const fullPath = path.join(current, entry.name); + if (entry.isDirectory()) { + 
walk(fullPath); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.ts') && !entry.name.endsWith('.d.ts')) { + files.push(fullPath); + } + } + }; + + walk(dir); + return files; +} + +export class RouteScanner { + scan(routesDir: string): Record { + const files = collectTsFiles(routesDir); + const routes: Record = {}; + + for (const file of files) { + const fileRoutes = this.scanFile(file); + for (const [routePath, entries] of Object.entries(fileRoutes)) { + routes[routePath] = [...(routes[routePath] ?? []), ...entries]; + } + } + + return routes; + } + + private scanFile(filePath: string): Record { + const sourceCode = readFileSync(filePath, 'utf-8'); + const sourceFile = ts.createSourceFile(filePath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + + const routes: Record = {}; + const authIdentifiers = new Set(); + + const isAuthMiddlewareExpression = (expr: ts.Expression): boolean => { + if (ts.isIdentifier(expr)) { + return authIdentifiers.has(expr.text) || isAuthLikeName(expr.text); + } + + if (ts.isPropertyAccessExpression(expr)) { + const text = expr.getText(sourceFile); + return isAuthLikeName(text); + } + + return false; + }; + + const collectAuthIdentifiers = (node: ts.Node): void => { + if (!ts.isVariableStatement(node)) return; + + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) continue; + const initializer = declaration.initializer; + if (ts.isCallExpression(initializer) && ts.isIdentifier(initializer.expression)) { + if (initializer.expression.text === 'createMiddleware' || initializer.expression.text === 'requireAuth') { + authIdentifiers.add(declaration.name.text); + } + } + + if (isAuthLikeName(declaration.name.text)) { + authIdentifiers.add(declaration.name.text); + } + } + }; + + ts.forEachChild(sourceFile, collectAuthIdentifiers); + + const visit = (node: ts.Node): void => { + if (ts.isCallExpression(node) && 
ts.isPropertyAccessExpression(node.expression)) { + const method = node.expression.name.text.toLowerCase(); + const httpMethods = new Set(['get', 'post', 'put', 'patch', 'delete', 'options', 'head']); + + if (httpMethods.has(method)) { + const [pathArg, ...handlerArgs] = node.arguments; + const routePath = getStringLiteral(pathArg); + + let requiresAuth = false; + for (const arg of handlerArgs) { + if (isAuthMiddlewareExpression(arg)) { + requiresAuth = true; + break; + } + } + + const route: RouteInfo = { + method: method.toUpperCase(), + path: routePath, + requiresAuth, + inputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'input'), + outputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'output'), + }; + + if (!routes[routePath]) { + routes[routePath] = []; + } + + routes[routePath].push(route); + } + } + + ts.forEachChild(node, visit); + }; + + visit(sourceFile); + return routes; + } + + private findSchemaUsage(sourceFile: ts.SourceFile, args: ts.NodeArray, mode: 'input' | 'output'): string | undefined { + const text = args.map((arg) => arg.getText(sourceFile)).join('\n'); + + if (mode === 'input') { + const parseMatch = text.match(/([A-Za-z0-9_]+Schema)\.(safeParse|parse)\(/); + if (parseMatch) return parseMatch[1]; + const middlewareMatch = text.match(/parseBody\(([^,]+),/); + if (middlewareMatch) return middlewareMatch[1].trim(); + } + + if (mode === 'output') { + const outputMatch = text.match(/([A-Za-z0-9_]+Schema)\.(parse|safeParse)\([^)]*c\.json/); + if (outputMatch) return outputMatch[1]; + } + + return undefined; + } +} diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts new file mode 100644 index 0000000..0dee86d --- /dev/null +++ b/betterbase/packages/cli/src/utils/scanner.ts @@ -0,0 +1,252 @@ +import { readFileSync } from 'node:fs'; +import * as ts from 'typescript'; + +export interface ColumnInfo { + name: string; + type: string; + nullable: boolean; + unique: boolean; + primaryKey: 
boolean; + defaultValue?: string; + references?: string; +} + +export interface TableInfo { + name: string; + columns: Record; + relations: string[]; + indexes: string[]; +} + +function unwrapExpression(expression: ts.Expression): ts.Expression { + let current = expression; + + while ( + ts.isParenthesizedExpression(current) || + ts.isAsExpression(current) || + ts.isTypeAssertionExpression(current) || + ts.isSatisfiesExpression(current) + ) { + current = (current as ts.ParenthesizedExpression | ts.AsExpression | ts.TypeAssertion | ts.SatisfiesExpression) + .expression; + } + + return current; +} + +function getCallName(call: ts.CallExpression): string { + if (ts.isIdentifier(call.expression)) { + return call.expression.text; + } + + if (ts.isPropertyAccessExpression(call.expression)) { + return call.expression.name.text; + } + + return ''; +} + +function getExpressionText(sourceFile: ts.SourceFile, node: ts.Node | undefined): string { + if (!node) { + return ''; + } + + return node.getText(sourceFile); +} + +export class SchemaScanner { + private readonly sourceFile: ts.SourceFile; + + constructor(schemaPath: string) { + let sourceCode: string; + + try { + sourceCode = readFileSync(schemaPath, 'utf-8'); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Failed to read schema file at ${schemaPath}: ${message}`); + } + + this.sourceFile = ts.createSourceFile(schemaPath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + } + + scan(): Record { + const tables: Record = {}; + + const visit = (node: ts.Node): void => { + if (ts.isVariableStatement(node)) { + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) { + continue; + } + + const initializer = unwrapExpression(declaration.initializer); + if (!ts.isCallExpression(initializer)) { + continue; + } + + const functionName = getCallName(initializer); + if (functionName === 'sqliteTable' || functionName === 'pgTable' || functionName === 'mysqlTable') { + tables[declaration.name.text] = this.parseTable(initializer); + } + } + } + + ts.forEachChild(node, visit); + }; + + visit(this.sourceFile); + return tables; + } + + private parseTable(callExpression: ts.CallExpression): TableInfo { + const [nameArg, columnsArg, indexesArg] = callExpression.arguments; + const tableName = ts.isStringLiteral(nameArg) ? nameArg.text : getExpressionText(this.sourceFile, nameArg); + + const columns: Record = {}; + const relations: string[] = []; + + if (columnsArg && ts.isObjectLiteralExpression(columnsArg)) { + for (const property of columnsArg.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const columnName = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + + const columnInfo = this.parseColumn(columnName, property.initializer); + columns[columnName] = columnInfo; + + if (columnInfo.references) { + relations.push(columnInfo.references); + } + } + } + + const indexes = this.parseIndexes(indexesArg); + + return { + name: tableName, + columns, + relations, + indexes, + }; + } + + private parseIndexes(indexesArg: ts.Expression | undefined): string[] { + if (!indexesArg) { + return []; + } + + const indexes: string[] = []; + const indexRoot = unwrapExpression(indexesArg); + + const collectFromObject = (obj: ts.ObjectLiteralExpression): void => { + for (const property of obj.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const value = unwrapExpression(property.initializer); + if (!ts.isCallExpression(value)) { + continue; + } + + const callName = getCallName(value); + if (callName === 'index' || callName === 'uniqueIndex') { + const key = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + indexes.push(key); + } + } + }; + + if (ts.isArrowFunction(indexRoot) || ts.isFunctionExpression(indexRoot)) { + const body = indexRoot.body; + if (ts.isObjectLiteralExpression(body)) { + collectFromObject(body); + } + + if (ts.isBlock(body)) { + for (const statement of body.statements) { + if (!ts.isReturnStatement(statement) || !statement.expression) { + continue; + } + + const expression = unwrapExpression(statement.expression); + if (ts.isObjectLiteralExpression(expression)) { + collectFromObject(expression); + } + } + } + } + + return indexes; + } + + private parseColumn(columnName: string, expression: ts.Expression): ColumnInfo { + let type = 'unknown'; + let nullable = true; + let unique = false; + let primaryKey = false; + let defaultValue: string | undefined; + let references: string | undefined; + + let current = unwrapExpression(expression); + + while (ts.isCallExpression(current)) { + const methodName = getCallName(current); + + if (methodName === 'text' || methodName === 'varchar' || methodName === 'char') { + type = 'text'; + } else if (methodName === 'integer' || methodName === 'int' || methodName === 'bigint' || methodName === 'serial') { + type = 'integer'; + } else if (methodName === 'real' || methodName === 'numeric' || methodName === 'decimal' || methodName === 'doublePrecision') { + type = 'number'; + } else if (methodName === 'boolean') { + type = 'boolean'; + } else if (methodName === 'timestamp' || methodName === 'datetime') { + type = 'datetime'; + } else if (methodName === 'json' || methodName === 'jsonb') { + type = 'json'; + } else if (methodName === 'blob') { + type = 'blob'; + } else if (methodName === 'notNull') { + nullable = false; + } else if (methodName === 'unique') { + unique = true; + } else if (methodName === 'primaryKey') { + primaryKey = true; + nullable = false; + } else if (methodName.startsWith('default')) { + defaultValue = 
getExpressionText(this.sourceFile, current.arguments[0]); + } else if (methodName === 'references') { + references = getExpressionText(this.sourceFile, current.arguments[0]); + } + + if (ts.isPropertyAccessExpression(current.expression)) { + current = unwrapExpression(current.expression.expression); + continue; + } + + break; + } + + return { + name: columnName, + type, + nullable, + unique, + primaryKey, + defaultValue, + references, + }; + } +} diff --git a/betterbase/packages/cli/src/utils/schema-scanner.ts b/betterbase/packages/cli/src/utils/schema-scanner.ts new file mode 100644 index 0000000..ff8ea30 --- /dev/null +++ b/betterbase/packages/cli/src/utils/schema-scanner.ts @@ -0,0 +1,2 @@ +export { SchemaScanner } from './scanner'; +export type { ColumnInfo, TableInfo } from './scanner'; diff --git a/betterbase/packages/cli/test/context-generator.test.ts b/betterbase/packages/cli/test/context-generator.test.ts new file mode 100644 index 0000000..2499430 --- /dev/null +++ b/betterbase/packages/cli/test/context-generator.test.ts @@ -0,0 +1,107 @@ +import { mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { ContextGenerator } from '../src/utils/context-generator'; + +describe('ContextGenerator', () => { + test('creates .betterbase-context.json from schema and routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull(), + }); + `, + ); + + writeFileSync( + path.join(root, 'src/routes/index.ts'), + ` + import { Hono } from 
'hono'; + const app = new Hono(); + app.get('/health', (c) => c.json({ ok: true })); + export default app; + `, + ); + + const generator = new ContextGenerator(); + const context = await generator.generate(root); + + expect(context.tables.users).toBeDefined(); + expect(context.tables.users.columns.id).toBeDefined(); + expect(context.tables.users.columns.email).toBeDefined(); + expect(context.routes['/health']).toBeDefined(); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.tables.users.name).toBe('users'); + expect(file.tables.users.columns.id.type).toBe('text'); + expect(file.tables.users.columns.email.type).toBe('text'); + expect(Array.isArray(file.routes['/health'])).toBe(true); + expect(file.routes['/health'].length).toBeGreaterThan(0); + expect(file.routes['/health'][0].method).toBe('GET'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing routes directory with empty routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-routes-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { id: text('id').primaryKey() }); + `, + ); + + const context = await new ContextGenerator().generate(root); + expect(context.routes).toEqual({}); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.routes).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles empty schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-empty-schema-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 
'src/db/schema.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-schema-')); + + try { + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 'src/routes/index.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/route-scanner.test.ts b/betterbase/packages/cli/test/route-scanner.test.ts new file mode 100644 index 0000000..9f56991 --- /dev/null +++ b/betterbase/packages/cli/test/route-scanner.test.ts @@ -0,0 +1,45 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { RouteScanner } from '../src/utils/route-scanner'; + +describe('RouteScanner', () => { + test('extracts hono routes with auth and schemas', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-routes-')); + + try { + const routesDir = path.join(root, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + writeFileSync( + path.join(routesDir, 'users.ts'), + ` + import { Hono } from 'hono'; + import { z } from 'zod'; + import { authMiddleware } from '../middleware/auth'; + + const createUserSchema = z.object({ email: z.string().email() }); + export const users = new Hono(); + + users.get('/users', authMiddleware, (c) => c.json({ users: [] })); + users.post('/users', async (c) => { + const body = await c.req.json(); + createUserSchema.parse(body); + return c.json({ ok: true }); + }); + `, + ); + + const scanner = new 
RouteScanner(); + const routes = scanner.scan(routesDir); + + expect(routes['/users']).toBeDefined(); + expect(routes['/users'].length).toBe(2); + expect(routes['/users'][0].requiresAuth).toBe(true); + expect(routes['/users'][1].inputSchema).toBe('createUserSchema'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/scanner.test.ts b/betterbase/packages/cli/test/scanner.test.ts new file mode 100644 index 0000000..edf5594 --- /dev/null +++ b/betterbase/packages/cli/test/scanner.test.ts @@ -0,0 +1,61 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { SchemaScanner } from '../src/utils/scanner'; + +describe('SchemaScanner', () => { + test('extracts tables, columns, relations, and indexes from drizzle schema', () => { + const dir = mkdtempSync(path.join(tmpdir(), 'bb-scanner-')); + + try { + const schemaPath = path.join(dir, 'schema.ts'); + writeFileSync( + schemaPath, + ` + import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core'; + + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull().unique(), + age: integer('age').default(18), + }, (table) => ({ + usersEmailIdx: index('users_email_idx').on(table.email), + })); + + export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + title: text('title').notNull(), + }); + + export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + postId: text('post_id').notNull().references(() => posts.id), + body: text('body'), + }); + `, + ); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + expect(Object.keys(tables)).toEqual(['users', 'posts', 'comments']); + + expect(tables.users.name).toBe('users'); + 
expect(tables.users.columns.id.primaryKey).toBe(true); + expect(tables.users.columns.id.nullable).toBe(false); + expect(tables.users.columns.email.unique).toBe(true); + expect(tables.users.columns.age.defaultValue).toBe('18'); + expect(tables.users.indexes).toContain('usersEmailIdx'); + + expect(tables.posts.columns.userId.references).toBe('() => users.id'); + expect(tables.posts.relations).toContain('() => users.id'); + + expect(tables.comments.columns.postId.references).toBe('() => posts.id'); + expect(tables.comments.relations).toContain('() => posts.id'); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..9b65d64 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,32 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + + + test('registers auth setup command', () => { + const program = createProgram(); + const auth = program.commands.find((command) => command.name() === 'auth'); + expect(auth).toBeDefined(); + + const setup = auth?.commands.find((command) => command.name() === 'setup'); + expect(setup).toBeDefined(); + }); + + test('registers dev command', () => { const program = createProgram(); + const dev = program.commands.find((command) => command.name() === 'dev'); + expect(dev).toBeDefined(); + }); + + test('registers migrate commands', () => { + const program = createProgram(); + const migrate = program.commands.find((command) => command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff 
--git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 'local.db'; -const sqlite = new Database(dbPath, { create: true }); + +// env.DB_PATH is always present because env schema provides a default. 
+const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..359c1c5 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,5 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +7,8 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err 
instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..22fff3d 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; import { HTTPException } from 'hono/http-exception'; -import { z } from 'zod'; +import { z, ZodError } from 'zod'; import { db } from '../db'; import { users } from '../db/schema'; import { parseBody } from '../middleware/validation'; @@ -10,11 +10,69 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + try { + const pagination = paginationSchema.parse({ + limit: c.req.query('limit'), + offset: c.req.query('offset'), + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + // No DB query is run for limit=0, so hasMore cannot be determined. 
+ hasMore: null, + }, + }); + } + + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + if (error instanceof ZodError) { + return c.json( + { + error: 'Invalid pagination query parameters', + details: error.issues, + }, + 400, + ); + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..574a5e4 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,8 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationDir": "dist/types" } } From a25d5ce88339f74d6b88268904958f8eb388de98 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 04:50:43 +0200 Subject: [PATCH 10/12] Add bb generate crud command for typed route scaffolding --- betterbase/.gitignore | 7 +- betterbase/README.md | 12 + betterbase/apps/cli/tsconfig.json | 11 +- betterbase/package.json | 4 +- betterbase/packages/cli/package.json | 6 +- betterbase/packages/cli/src/build.ts | 18 +- betterbase/packages/cli/src/commands/auth.ts | 238 ++++++++++++ betterbase/packages/cli/src/commands/dev.ts | 51 +++ .../packages/cli/src/commands/generate.ts | 214 +++++++++++ betterbase/packages/cli/src/commands/init.ts | 88 ++++- .../packages/cli/src/commands/migrate.ts | 350 +++++++++++++++++- betterbase/packages/cli/src/constants.ts | 1 + betterbase/packages/cli/src/index.ts | 56 ++- .../cli/src/utils/context-generator.ts | 84 +++++ .../packages/cli/src/utils/route-scanner.ts | 165 +++++++++ 
betterbase/packages/cli/src/utils/scanner.ts | 252 +++++++++++++ .../packages/cli/src/utils/schema-scanner.ts | 2 + .../cli/test/context-generator.test.ts | 107 ++++++ .../packages/cli/test/route-scanner.test.ts | 45 +++ betterbase/packages/cli/test/scanner.test.ts | 61 +++ betterbase/packages/cli/test/smoke.test.ts | 35 +- betterbase/templates/base/README.md | 12 + betterbase/templates/base/package.json | 4 +- betterbase/templates/base/src/db/index.ts | 6 +- betterbase/templates/base/src/index.ts | 5 +- betterbase/templates/base/src/lib/env.ts | 3 + betterbase/templates/base/src/routes/index.ts | 5 +- betterbase/templates/base/src/routes/users.ts | 64 +++- betterbase/tsconfig.base.json | 4 +- 29 files changed, 1850 insertions(+), 60 deletions(-) create mode 100644 betterbase/packages/cli/src/commands/auth.ts create mode 100644 betterbase/packages/cli/src/commands/dev.ts create mode 100644 betterbase/packages/cli/src/commands/generate.ts create mode 100644 betterbase/packages/cli/src/constants.ts create mode 100644 betterbase/packages/cli/src/utils/context-generator.ts create mode 100644 betterbase/packages/cli/src/utils/route-scanner.ts create mode 100644 betterbase/packages/cli/src/utils/scanner.ts create mode 100644 betterbase/packages/cli/src/utils/schema-scanner.ts create mode 100644 betterbase/packages/cli/test/context-generator.test.ts create mode 100644 betterbase/packages/cli/test/route-scanner.test.ts create mode 100644 betterbase/packages/cli/test/scanner.test.ts diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..240b1f4 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,17 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo +.betterbase-context.json .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..ebc5905 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,18 @@ Initial 
BetterBase monorepo scaffold with a concrete base template. - Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` (runs `turbo run typecheck --filter '*'`) + +> Note: `templates/base` is not in the root workspace graph (`apps/*`, `packages/*`), so run template checks separately (e.g. `cd templates/base && bun run typecheck`). + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..0f4c3d8 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index 5b8543d..54f6fb8 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -16,11 +16,11 @@ "chalk": "^5.3.0", "commander": "^12.1.0", "inquirer": "^10.2.2", - "zod": "^3.23.8" + "zod": "^3.23.8", + "typescript": "^5.3.0" }, "devDependencies": { - "@types/bun": "^1.3.9", - "typescript": "^5.9.3" + "@types/bun": "^1.3.9" }, "exports": { ".": 
"./src/index.ts" diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,16 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. */ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +23,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -26,11 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/auth.ts b/betterbase/packages/cli/src/commands/auth.ts new file mode 100644 index 0000000..3dbc76e --- /dev/null +++ b/betterbase/packages/cli/src/commands/auth.ts @@ -0,0 +1,238 @@ +import { execSync } from 'node:child_process'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import * as logger from '../utils/logger'; + 
+const AUTH_SCHEMA_BLOCK = ` +// Auth tables (generated by BetterAuth) +export const sessions = sqliteTable('sessions', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + expiresAt: integer('expires_at', { mode: 'timestamp' }).notNull(), + ipAddress: text('ip_address'), + userAgent: text('user_agent'), +}); + +export const accounts = sqliteTable('accounts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + provider: text('provider').notNull(), + providerAccountId: text('provider_account_id').notNull(), + accessToken: text('access_token'), + refreshToken: text('refresh_token'), + expiresAt: integer('expires_at', { mode: 'timestamp' }), +}); +`; + +const AUTH_ROUTE_FILE = `import { Hono } from 'hono'; +import { z } from 'zod'; +import { eq } from 'drizzle-orm'; +import { db } from '../db'; +import { users, sessions } from '../db/schema'; + +const authRoute = new Hono(); + +const loginSchema = z.object({ + email: z.string().email(), + password: z.string().min(8), +}); + +authRoute.post('/login', async (c) => { + const body = loginSchema.parse(await c.req.json()); + + const user = await db.select().from(users).where(eq(users.email, body.email)).limit(1); + if (user.length === 0) { + return c.json({ error: 'Invalid credentials' }, 401); + } + + const sessionId = crypto.randomUUID(); + const expiresAt = new Date(Date.now() + 30 * 24 * 60 * 60 * 1000); + + await db.insert(sessions).values({ + id: sessionId, + userId: user[0].id, + expiresAt, + ipAddress: c.req.header('cf-connecting-ip') || c.req.header('x-forwarded-for') || null, + userAgent: c.req.header('user-agent') || null, + }); + + return c.json({ + token: sessionId, + user: { + id: user[0].id, + email: user[0].email, + name: user[0].name, + }, + }); +}); + +authRoute.post('/logout', async (c) => { + const token = c.req.header('Authorization')?.split(' ')[1]; + if (token) { + await 
db.delete(sessions).where(eq(sessions.id, token)); + } + + return c.json({ message: 'Logged out' }); +}); + +export { authRoute }; +`; + +const AUTH_MIDDLEWARE_FILE = `import { and, eq, gt } from 'drizzle-orm'; +import { createMiddleware } from 'hono/factory'; +import { db } from '../db'; +import { sessions, users } from '../db/schema'; + +export interface AuthContext { + user: { + id: string; + email: string; + name: string | null; + }; +} + +function getSessionToken(authHeader: string | undefined): string | null { + if (!authHeader) return null; + + const parts = authHeader.split(' '); + if (parts.length !== 2 || parts[0] !== 'Bearer') return null; + + return parts[1]; +} + +async function validateSession(token: string): Promise { + const session = await db + .select() + .from(sessions) + .where(and(eq(sessions.id, token), gt(sessions.expiresAt, new Date()))) + .limit(1); + + if (session.length === 0) return null; + + const user = await db.select().from(users).where(eq(users.id, session[0].userId)).limit(1); + return user.length > 0 ? 
user[0] : null; +} + +export const requireAuth = createMiddleware<{ Variables: AuthContext }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (!token) { + return c.json({ error: 'Unauthorized: No token provided' }, 401); + } + + const user = await validateSession(token); + if (!user) { + return c.json({ error: 'Unauthorized: Invalid or expired token' }, 401); + } + + c.set('user', user); + await next(); +}); + +export const optionalAuth = createMiddleware<{ Variables: Partial }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (token) { + const user = await validateSession(token); + if (user) { + c.set('user', user); + } + } + + await next(); +}); + +export function getUser(c: { get: (key: 'user') => AuthContext['user'] }): AuthContext['user'] { + return c.get('user'); +} +`; + +function appendIfMissing(filePath: string, marker: string, content: string): void { + const current = readFileSync(filePath, 'utf-8'); + if (current.includes(marker)) { + return; + } + + const next = current.trimEnd() + '\n\n' + content.trim() + '\n'; + writeFileSync(filePath, next); +} + +function ensureAuthInConfig(projectRoot: string): void { + const configPath = path.join(projectRoot, 'betterbase.config.ts'); + if (!existsSync(configPath)) return; + + const current = readFileSync(configPath, 'utf-8'); + if (current.includes('auth: {')) return; + + const updated = current.replace( + 'export default {', + `export default {\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + ); + + writeFileSync(configPath, updated); +} + +function ensureEnvVar(projectRoot: string): void { + const envPath = path.join(projectRoot, '.env.example'); + if (!existsSync(envPath)) return; + + const env = readFileSync(envPath, 'utf-8'); + if (env.includes('AUTH_SECRET=')) return; + + writeFileSync(envPath, `${env.trimEnd()}\n\n# 
Auth\nAUTH_SECRET=your-secret-key-here-change-in-production\n`); +} + +function ensureRoutesIndexHook(projectRoot: string): void { + const routesIndexPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routesIndexPath)) return; + + let current = readFileSync(routesIndexPath, 'utf-8'); + + if (!current.includes("import { authRoute } from './auth';")) { + current = current.replace("import { usersRoute } from './users';", "import { usersRoute } from './users';\nimport { authRoute } from './auth';"); + } + + if (!current.includes("app.route('/auth', authRoute);")) { + current = current.replace("app.route('/api/users', usersRoute);", "app.route('/api/users', usersRoute);\n app.route('/auth', authRoute);"); + } + + writeFileSync(routesIndexPath, current); +} + +export async function runAuthSetupCommand(projectRoot: string = process.cwd()): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + const middlewarePath = path.join(resolvedRoot, 'src/middleware/auth.ts'); + const routePath = path.join(resolvedRoot, 'src/routes/auth.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Could not find schema file at ${schemaPath}. 
Run this command from a BetterBase app root.`); + } + + logger.info('šŸ” Setting up authentication...'); + + logger.info('šŸ“¦ Installing better-auth...'); + execSync('bun add better-auth', { cwd: resolvedRoot, stdio: 'inherit' }); + + logger.info('šŸ“ Adding auth tables to schema...'); + appendIfMissing(schemaPath, "export const sessions = sqliteTable('sessions'", AUTH_SCHEMA_BLOCK); + + logger.info('šŸ›”ļø Creating auth middleware...'); + mkdirSync(path.dirname(middlewarePath), { recursive: true }); + writeFileSync(middlewarePath, AUTH_MIDDLEWARE_FILE); + + logger.info('🧭 Creating auth routes...'); + mkdirSync(path.dirname(routePath), { recursive: true }); + writeFileSync(routePath, AUTH_ROUTE_FILE); + ensureRoutesIndexHook(resolvedRoot); + + logger.info('āš™ļø Updating config...'); + ensureAuthInConfig(resolvedRoot); + ensureEnvVar(resolvedRoot); + + logger.success('Authentication setup complete!'); + console.log('\nNext steps:'); + console.log(' 1. Set AUTH_SECRET in .env'); + console.log(' 2. Run: bun run db:push'); + console.log(' 3. 
Use requireAuth middleware on protected routes'); +} diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts new file mode 100644 index 0000000..3720944 --- /dev/null +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -0,0 +1,51 @@ +import { existsSync, watch } from 'node:fs'; +import path from 'node:path'; +import { ContextGenerator } from '../utils/context-generator'; +import * as logger from '../utils/logger'; + +export async function runDevCommand(projectRoot: string = process.cwd()): Promise { + const generator = new ContextGenerator(); + + await generator.generate(projectRoot); + + const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; + const timers = new Map>(); + + for (const watchPath of watchPaths) { + if (!existsSync(watchPath)) { + logger.warn(`Watch path does not exist; skipping: ${watchPath}`); + continue; + } + + try { + watch(watchPath, { recursive: true }, (_eventType, filename) => { + console.log(`šŸ“ File changed: ${String(filename ?? '')}`); + + const existing = timers.get(watchPath); + if (existing) { + clearTimeout(existing); + } + + const timer = setTimeout(async () => { + console.log('šŸ”„ Regenerating context...'); + const start = Date.now(); + + try { + await generator.generate(projectRoot); + console.log(`āœ… Context updated in ${Date.now() - start}ms`); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āŒ Failed to regenerate context: ${message}`); + } + }, 250); + + timers.set(watchPath, timer); + }); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logger.warn(`Failed to watch path ${watchPath}: ${message}`); + } + } + + console.log('šŸ‘€ Watching for schema and route changes...'); +} diff --git a/betterbase/packages/cli/src/commands/generate.ts b/betterbase/packages/cli/src/commands/generate.ts new file mode 100644 index 0000000..e9e9bae --- /dev/null +++ b/betterbase/packages/cli/src/commands/generate.ts @@ -0,0 +1,214 @@ +import { execSync } from 'node:child_process'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { SchemaScanner, type TableInfo } from '../utils/schema-scanner'; +import * as logger from '../utils/logger'; + +function toSingular(name: string): string { + return name.endsWith('s') ? name.slice(0, -1) : `${name}Item`; +} + +function schemaTypeToZod(type: string): string { + if (type === 'integer' || type === 'number') { + return 'z.coerce.number()'; + } + + if (type === 'boolean') { + return 'z.coerce.boolean()'; + } + + if (type === 'json') { + return 'z.unknown()'; + } + + if (type === 'datetime') { + return 'z.coerce.date()'; + } + + return 'z.string()'; +} + +function buildSchemaShape(table: TableInfo, mode: 'create' | 'update'): string { + const entries = Object.entries(table.columns) + .filter(([columnName, column]) => !(column.primaryKey || columnName === 'id')) + .map(([columnName, column]) => { + const base = schemaTypeToZod(column.type); + const optional = mode === 'update' || column.nullable || Boolean(column.defaultValue); + return ` ${columnName}: ${optional ? 
`${base}.optional()` : base}`; + }); + + return entries.join(',\n'); +} + +function generateRouteFile(tableName: string, table: TableInfo): string { + const singular = toSingular(tableName); + const createShape = buildSchemaShape(table, 'create'); + const updateShape = buildSchemaShape(table, 'update'); + + return `import { and, asc, desc, eq } from 'drizzle-orm'; +import { Hono } from 'hono'; +import { zValidator } from '@hono/zod-validator'; +import { z } from 'zod'; +import { db } from '../db'; +import { ${tableName} } from '../db/schema'; + +export const ${tableName}Route = new Hono(); + +const createSchema = z.object({ +${createShape} +}); + +const updateSchema = z.object({ +${updateShape} +}); + +${tableName}Route.get('/', async (c) => { + const limit = Number(c.req.query('limit') ?? 50); + const offset = Number(c.req.query('offset') ?? 0); + const safeLimit = Number.isFinite(limit) && limit >= 0 ? Math.min(limit, 100) : 50; + const safeOffset = Number.isFinite(offset) && offset >= 0 ? offset : 0; + + const queryParams = c.req.query(); + const sort = queryParams.sort; + + const filters = Object.entries(queryParams).filter(([key, value]) => { + return key !== 'limit' && key !== 'offset' && key !== 'sort' && value !== undefined; + }); + + let query = db.select().from(${tableName}).$dynamic(); + + if (filters.length > 0) { + const conditions = filters + .filter(([key]) => key in ${tableName}) + .map(([key, value]) => eq(${tableName}[key as keyof typeof ${tableName}] as never, value as never)); + + if (conditions.length > 0) { + query = query.where(and(...conditions)); + } + } + + if (sort) { + const [field, order] = sort.split(':'); + if (field && field in ${tableName}) { + const column = ${tableName}[field as keyof typeof ${tableName}] as never; + query = query.orderBy(order === 'desc' ? 
desc(column) : asc(column)); + } + } + + const items = await query.limit(safeLimit).offset(safeOffset); + return c.json({ ${tableName}: items, count: items.length, pagination: { limit: safeLimit, offset: safeOffset } }); +}); + +${tableName}Route.get('/:id', async (c) => { + const id = c.req.param('id'); + const item = await db.select().from(${tableName}).where(eq(${tableName}.id, id as never)).limit(1); + + if (item.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + return c.json({ ${singular}: item[0] }); +}); + +${tableName}Route.post('/', zValidator('json', createSchema), async (c) => { + const body = c.req.valid('json'); + const created = await db.insert(${tableName}).values(body).returning(); + return c.json({ ${singular}: created[0] }, 201); +}); + +${tableName}Route.patch('/:id', zValidator('json', updateSchema), async (c) => { + const id = c.req.param('id'); + const body = c.req.valid('json'); + + const updated = await db.update(${tableName}).set(body).where(eq(${tableName}.id, id as never)).returning(); + if (updated.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + return c.json({ ${singular}: updated[0] }); +}); + +${tableName}Route.delete('/:id', async (c) => { + const id = c.req.param('id'); + const deleted = await db.delete(${tableName}).where(eq(${tableName}.id, id as never)).returning(); + + if (deleted.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + return c.json({ message: '${singular} deleted', ${singular}: deleted[0] }); +}); +`; +} + +function updateMainRouter(projectRoot: string, tableName: string): void { + const routerPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routerPath)) { + logger.warn(`Routes index not found at ${routerPath}. 
Please wire the route manually.`); + return; + } + + let router = readFileSync(routerPath, 'utf-8'); + const importLine = `import { ${tableName}Route } from './${tableName}';`; + const routeLine = ` app.route('/api/${tableName}', ${tableName}Route);`; + + if (!router.includes(importLine)) { + const firstRouteImport = /import\s+\{\s*healthRoute\s*\}\s+from\s+'\.\/health';/; + if (firstRouteImport.test(router)) { + router = router.replace(firstRouteImport, (m) => `${m}\n${importLine}`); + } else { + router = `${importLine}\n${router}`; + } + } + + if (!router.includes(routeLine)) { + const routeStatements = [...router.matchAll(/\s*app\.route\([^\n]+\);/g)]; + if (routeStatements.length > 0) { + const last = routeStatements[routeStatements.length - 1]; + const insertAt = (last.index ?? 0) + last[0].length; + router = `${router.slice(0, insertAt)}\n${routeLine}${router.slice(insertAt)}`; + } else { + router = router.replace(/\n}\s*$/, `\n${routeLine}\n}`); + } + } + + writeFileSync(routerPath, router); +} + +export async function runGenerateCrudCommand(projectRoot: string, tableName: string): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Schema file not found at ${schemaPath}`); + } + + logger.info(`šŸ”Ø Generating CRUD for ${tableName}...`); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + const table = tables[tableName]; + if (!table) { + throw new Error(`Table "${tableName}" not found in schema.`); + } + + logger.info('šŸ“¦ Installing @hono/zod-validator...'); + execSync('bun add @hono/zod-validator', { cwd: resolvedRoot, stdio: 'inherit' }); + + const routesDir = path.join(resolvedRoot, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + const routePath = path.join(routesDir, `${tableName}.ts`); + writeFileSync(routePath, generateRouteFile(tableName, table)); + + 
updateMainRouter(resolvedRoot, tableName); + + logger.success(`Generated ${routePath}`); + console.log('\nEndpoints created:'); + console.log(` GET /api/${tableName}`); + console.log(` GET /api/${tableName}/:id`); + console.log(` POST /api/${tableName}`); + console.log(` PATCH /api/${tableName}/:id`); + console.log(` DELETE /api/${tableName}/:id`); +} diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..abe5481 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -338,19 +338,20 @@ import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; import { healthRoute } from './health'; import { usersRoute } from './users'; +import { env } from '../lib/env'; -export default function registerRoutes(app: Hono): void { +export function registerRoutes(app: Hono): void { app.use('*', cors()); app.use('*', logger()); app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, @@ -374,6 +375,19 @@ async function writeProjectFiles( await mkdir(path.join(projectPath, 'src/middleware'), { recursive: true }); await mkdir(path.join(projectPath, 'src/lib'), { recursive: true }); + + await writeFile( + path.join(projectPath, 'src/lib/env.ts'), + `import { z } from 'zod'; + +const envSchema = z.object({ + NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), +}); + +export const env = envSchema.parse(process.env); +`, + ); + await writeFile( path.join(projectPath, 'betterbase.config.ts'), `export default { @@ -427,7 +441,6 @@ bun.lockb .env.* !.env.example local.db -.drizzle `, ); @@ -447,7 +460,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -471,9 +484,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -510,9 +523,60 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = 
Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { @@ -545,7 +609,7 @@ usersRoute.post('/', async (c) => { await writeFile( path.join(projectPath, 'src/index.ts'), `import { Hono } from 'hono'; -import registerRoutes from './routes'; +import { registerRoutes } from './routes'; const app = new Hono(); registerRoutes(app); diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..8999867 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,352 @@ +import { Database } from 'bun:sqlite'; +import chalk from 'chalk'; +import { access, mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; -/** - * Run the `bb migrate` command. 
- */ +export type MigrationChangeType = + | 'create_table' + | 'add_column' + | 'modify_column' + | 'drop_column' + | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; + +async function runDrizzleKit(args: string[]): Promise { + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { + success: exitCode === 0, + stdout, + stderr, + }; +} + +async function listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + try { + await access(dir); + } catch { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); + continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) { + continue; + } + + const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + if (createTable) { + changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + continue; + } + + const dropTable = 
sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + if (dropTable) { + changes.push({ type: 'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + if (addColumn) { + changes.push({ + type: 'add_column', + table: addColumn[1], + column: addColumn[2], + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: dropColumn[1], + column: dropColumn[2], + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + /alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: alterColumn[1], + column: alterColumn[3] ?? alterColumn[4], + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), + detail: sql, + }); + continue; + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length > 0) { + console.log(chalk.green('āœ… New Tables:')); + for (const change of newTables) { + console.log(chalk.green(` + ${change.table}`)); + } + console.log(''); + } + + if (newColumns.length > 0) { + console.log(chalk.green('āœ… New Columns:')); + for (const change of newColumns) { + console.log(chalk.green(` + ${change.table}.${change.column 
?? ''}`)); + } + console.log(''); + } + + if (modified.length > 0) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + for (const change of modified) { + console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (destructive.length > 0) { + console.log(chalk.red('āŒ Destructive Changes:')); + for (const change of destructive) { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + } + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + if (destructive.length === 0) { + return true; + } + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + for (const change of destructive) { + console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`); + } + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; + + try { + await access(sourcePath); + } catch { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + + const db = new Database(sourcePath, { readonly: true }); + try { + const snapshot = db.serialize(); + await Bun.write(backupPath, snapshot); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) { + return; + } + + const bytes = await Bun.file(backup.backupPath).bytes(); + await Bun.write(backup.sourcePath, bytes); + + logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. 
Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) { + continue; + } + + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. 
(Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ + message: 'Apply migrations to production now?', + initial: false, + }); + + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + + const confirmed = await confirmDestructive(changes); + if (!confirmed) { + return; + } + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..8daff7f --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1 @@ +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..25e8e04 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -1,6 +1,9 @@ import { Command, CommanderError } from 'commander'; import { runInitCommand } from './commands/init'; +import { runDevCommand } from './commands/dev'; import { runMigrateCommand } from './commands/migrate'; +import { runAuthSetupCommand } from './commands/auth'; +import { runGenerateCrudCommand } from './commands/generate'; import * as logger from './utils/logger'; import 
packageJson from '../package.json'; @@ -24,12 +27,57 @@ export function createProgram(): Command { await runInitCommand({ projectName }); }); + + program + .command('dev') + .description('Watch schema/routes and regenerate .betterbase-context.json') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runDevCommand(projectRoot); + }); + + + const auth = program.command('auth').description('Authentication helpers'); + + auth + .command('setup') + .description('Install and scaffold BetterAuth integration') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runAuthSetupCommand(projectRoot); + }); + + + const generate = program.command('generate').description('Code generation helpers'); + + generate + .command('crud') + .description('Generate full CRUD routes for a table') + .argument('', 'table name from src/db/schema.ts') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (tableName: string, projectRoot: string) => { + await runGenerateCrudCommand(projectRoot, tableName); + }); + program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + .description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git 
a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts new file mode 100644 index 0000000..aabf0f4 --- /dev/null +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -0,0 +1,84 @@ +import { existsSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { RouteScanner, type RouteInfo } from './route-scanner'; +import { SchemaScanner, type TableInfo } from './schema-scanner'; +import * as logger from './logger'; + +export interface BetterBaseContext { + version: string; + generated_at: string; + tables: Record; + routes: Record; + ai_prompt: string; +} + +export class ContextGenerator { + async generate(projectRoot: string): Promise { + const schemaPath = path.join(projectRoot, 'src/db/schema.ts'); + const routesPath = path.join(projectRoot, 'src/routes'); + + let tables: Record = {}; + let routes: Record = {}; + + if (existsSync(schemaPath)) { + const schemaScanner = new SchemaScanner(schemaPath); + tables = schemaScanner.scan(); + } else { + logger.warn(`Schema file not found; continuing with empty tables: ${schemaPath}`); + } + + if (existsSync(routesPath)) { + const routeScanner = new RouteScanner(); + routes = routeScanner.scan(routesPath); + } else { + logger.warn(`Routes directory not found; continuing with empty routes: ${routesPath}`); + } + + const context: BetterBaseContext = { + version: '1.0.0', + generated_at: new Date().toISOString(), + tables, + routes, + ai_prompt: this.generateAIPrompt(tables, routes), + }; + + const outputPath = path.join(projectRoot, '.betterbase-context.json'); + writeFileSync(outputPath, `${JSON.stringify(context, null, 2)}\n`); + console.log(`āœ… Generated ${outputPath}`); + + return context; + } + + private generateAIPrompt(tables: Record, routes: Record): string { + const tableNames = Object.keys(tables); + const routeCount = Object.values(routes).reduce((count, methods) => count + methods.length, 0); + + let prompt = `This is a 
BetterBase backend project with ${tableNames.length} tables and ${routeCount} API endpoints.\n\n`; + + prompt += 'DATABASE SCHEMA:\n'; + for (const tableName of tableNames) { + const table = tables[tableName]; + const columns = Object.keys(table.columns ?? {}).join(', '); + prompt += `- ${tableName}: ${columns}\n`; + if (table.relations.length > 0) { + prompt += ` Relations: ${table.relations.join(', ')}\n`; + } + } + + prompt += '\nAPI ENDPOINTS:\n'; + for (const [routePath, methods] of Object.entries(routes)) { + for (const route of methods) { + const auth = route.requiresAuth ? ' [AUTH REQUIRED]' : ''; + prompt += `- ${route.method} ${routePath}${auth}\n`; + } + } + + prompt += '\nWhen writing code for this project:\n'; + prompt += "1. Always import tables from './src/db/schema'\n"; + prompt += '2. Use Drizzle ORM for database queries\n'; + prompt += '3. Validate inputs with Zod\n'; + prompt += '4. Return JSON responses with proper status codes\n'; + + return prompt; + } +} diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts new file mode 100644 index 0000000..5fc7aa9 --- /dev/null +++ b/betterbase/packages/cli/src/utils/route-scanner.ts @@ -0,0 +1,165 @@ +import { readdirSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import * as ts from 'typescript'; + +export interface RouteInfo { + method: string; + path: string; + requiresAuth: boolean; + inputSchema?: string; + outputSchema?: string; +} + +function getStringLiteral(node: ts.Node | undefined): string { + if (!node) return ''; + if (ts.isStringLiteral(node) || ts.isNoSubstitutionTemplateLiteral(node)) { + return node.text; + } + return node.getText(); +} + +function isAuthLikeName(value: string): boolean { + return /\bauth\b/i.test(value) || /^auth/i.test(value) || /^(authMiddleware|requireAuth)$/i.test(value); +} + +function collectTsFiles(dir: string): string[] { + const files: string[] = []; + + const walk = (current: 
string): void => { + let entries: ReturnType; + try { + entries = readdirSync(current, { withFileTypes: true }); + } catch { + return; + } + + for (const entry of entries) { + const fullPath = path.join(current, entry.name); + if (entry.isDirectory()) { + walk(fullPath); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.ts') && !entry.name.endsWith('.d.ts')) { + files.push(fullPath); + } + } + }; + + walk(dir); + return files; +} + +export class RouteScanner { + scan(routesDir: string): Record { + const files = collectTsFiles(routesDir); + const routes: Record = {}; + + for (const file of files) { + const fileRoutes = this.scanFile(file); + for (const [routePath, entries] of Object.entries(fileRoutes)) { + routes[routePath] = [...(routes[routePath] ?? []), ...entries]; + } + } + + return routes; + } + + private scanFile(filePath: string): Record { + const sourceCode = readFileSync(filePath, 'utf-8'); + const sourceFile = ts.createSourceFile(filePath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + + const routes: Record = {}; + const authIdentifiers = new Set(); + + const isAuthMiddlewareExpression = (expr: ts.Expression): boolean => { + if (ts.isIdentifier(expr)) { + return authIdentifiers.has(expr.text) || isAuthLikeName(expr.text); + } + + if (ts.isPropertyAccessExpression(expr)) { + const text = expr.getText(sourceFile); + return isAuthLikeName(text); + } + + return false; + }; + + const collectAuthIdentifiers = (node: ts.Node): void => { + if (!ts.isVariableStatement(node)) return; + + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) continue; + const initializer = declaration.initializer; + if (ts.isCallExpression(initializer) && ts.isIdentifier(initializer.expression)) { + if (initializer.expression.text === 'createMiddleware' || initializer.expression.text === 'requireAuth') { + authIdentifiers.add(declaration.name.text); + } + } + + if 
(isAuthLikeName(declaration.name.text)) { + authIdentifiers.add(declaration.name.text); + } + } + }; + + ts.forEachChild(sourceFile, collectAuthIdentifiers); + + const visit = (node: ts.Node): void => { + if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) { + const method = node.expression.name.text.toLowerCase(); + const httpMethods = new Set(['get', 'post', 'put', 'patch', 'delete', 'options', 'head']); + + if (httpMethods.has(method)) { + const [pathArg, ...handlerArgs] = node.arguments; + const routePath = getStringLiteral(pathArg); + + let requiresAuth = false; + for (const arg of handlerArgs) { + if (isAuthMiddlewareExpression(arg)) { + requiresAuth = true; + break; + } + } + + const route: RouteInfo = { + method: method.toUpperCase(), + path: routePath, + requiresAuth, + inputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'input'), + outputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'output'), + }; + + if (!routes[routePath]) { + routes[routePath] = []; + } + + routes[routePath].push(route); + } + } + + ts.forEachChild(node, visit); + }; + + visit(sourceFile); + return routes; + } + + private findSchemaUsage(sourceFile: ts.SourceFile, args: ts.NodeArray, mode: 'input' | 'output'): string | undefined { + const text = args.map((arg) => arg.getText(sourceFile)).join('\n'); + + if (mode === 'input') { + const parseMatch = text.match(/([A-Za-z0-9_]+Schema)\.(safeParse|parse)\(/); + if (parseMatch) return parseMatch[1]; + const middlewareMatch = text.match(/parseBody\(([^,]+),/); + if (middlewareMatch) return middlewareMatch[1].trim(); + } + + if (mode === 'output') { + const outputMatch = text.match(/([A-Za-z0-9_]+Schema)\.(parse|safeParse)\([^)]*c\.json/); + if (outputMatch) return outputMatch[1]; + } + + return undefined; + } +} diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts new file mode 100644 index 0000000..0dee86d --- /dev/null +++ 
b/betterbase/packages/cli/src/utils/scanner.ts @@ -0,0 +1,252 @@ +import { readFileSync } from 'node:fs'; +import * as ts from 'typescript'; + +export interface ColumnInfo { + name: string; + type: string; + nullable: boolean; + unique: boolean; + primaryKey: boolean; + defaultValue?: string; + references?: string; +} + +export interface TableInfo { + name: string; + columns: Record; + relations: string[]; + indexes: string[]; +} + +function unwrapExpression(expression: ts.Expression): ts.Expression { + let current = expression; + + while ( + ts.isParenthesizedExpression(current) || + ts.isAsExpression(current) || + ts.isTypeAssertionExpression(current) || + ts.isSatisfiesExpression(current) + ) { + current = (current as ts.ParenthesizedExpression | ts.AsExpression | ts.TypeAssertion | ts.SatisfiesExpression) + .expression; + } + + return current; +} + +function getCallName(call: ts.CallExpression): string { + if (ts.isIdentifier(call.expression)) { + return call.expression.text; + } + + if (ts.isPropertyAccessExpression(call.expression)) { + return call.expression.name.text; + } + + return ''; +} + +function getExpressionText(sourceFile: ts.SourceFile, node: ts.Node | undefined): string { + if (!node) { + return ''; + } + + return node.getText(sourceFile); +} + +export class SchemaScanner { + private readonly sourceFile: ts.SourceFile; + + constructor(schemaPath: string) { + let sourceCode: string; + + try { + sourceCode = readFileSync(schemaPath, 'utf-8'); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Failed to read schema file at ${schemaPath}: ${message}`); + } + + this.sourceFile = ts.createSourceFile(schemaPath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + } + + scan(): Record { + const tables: Record = {}; + + const visit = (node: ts.Node): void => { + if (ts.isVariableStatement(node)) { + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) { + continue; + } + + const initializer = unwrapExpression(declaration.initializer); + if (!ts.isCallExpression(initializer)) { + continue; + } + + const functionName = getCallName(initializer); + if (functionName === 'sqliteTable' || functionName === 'pgTable' || functionName === 'mysqlTable') { + tables[declaration.name.text] = this.parseTable(initializer); + } + } + } + + ts.forEachChild(node, visit); + }; + + visit(this.sourceFile); + return tables; + } + + private parseTable(callExpression: ts.CallExpression): TableInfo { + const [nameArg, columnsArg, indexesArg] = callExpression.arguments; + const tableName = ts.isStringLiteral(nameArg) ? nameArg.text : getExpressionText(this.sourceFile, nameArg); + + const columns: Record = {}; + const relations: string[] = []; + + if (columnsArg && ts.isObjectLiteralExpression(columnsArg)) { + for (const property of columnsArg.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const columnName = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + + const columnInfo = this.parseColumn(columnName, property.initializer); + columns[columnName] = columnInfo; + + if (columnInfo.references) { + relations.push(columnInfo.references); + } + } + } + + const indexes = this.parseIndexes(indexesArg); + + return { + name: tableName, + columns, + relations, + indexes, + }; + } + + private parseIndexes(indexesArg: ts.Expression | undefined): string[] { + if (!indexesArg) { + return []; + } + + const indexes: string[] = []; + const indexRoot = unwrapExpression(indexesArg); + + const collectFromObject = (obj: ts.ObjectLiteralExpression): void => { + for (const property of obj.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const value = unwrapExpression(property.initializer); + if (!ts.isCallExpression(value)) { + continue; + } + + const callName = getCallName(value); + if (callName === 'index' || callName === 'uniqueIndex') { + const key = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + indexes.push(key); + } + } + }; + + if (ts.isArrowFunction(indexRoot) || ts.isFunctionExpression(indexRoot)) { + const body = indexRoot.body; + if (ts.isObjectLiteralExpression(body)) { + collectFromObject(body); + } + + if (ts.isBlock(body)) { + for (const statement of body.statements) { + if (!ts.isReturnStatement(statement) || !statement.expression) { + continue; + } + + const expression = unwrapExpression(statement.expression); + if (ts.isObjectLiteralExpression(expression)) { + collectFromObject(expression); + } + } + } + } + + return indexes; + } + + private parseColumn(columnName: string, expression: ts.Expression): ColumnInfo { + let type = 'unknown'; + let nullable = true; + let unique = false; + let primaryKey = false; + let defaultValue: string | undefined; + let references: string | undefined; + + let current = unwrapExpression(expression); + + while (ts.isCallExpression(current)) { + const methodName = getCallName(current); + + if (methodName === 'text' || methodName === 'varchar' || methodName === 'char') { + type = 'text'; + } else if (methodName === 'integer' || methodName === 'int' || methodName === 'bigint' || methodName === 'serial') { + type = 'integer'; + } else if (methodName === 'real' || methodName === 'numeric' || methodName === 'decimal' || methodName === 'doublePrecision') { + type = 'number'; + } else if (methodName === 'boolean') { + type = 'boolean'; + } else if (methodName === 'timestamp' || methodName === 'datetime') { + type = 'datetime'; + } else if (methodName === 'json' || methodName === 'jsonb') { + type = 'json'; + } else if (methodName === 'blob') { + type = 'blob'; + } else if (methodName === 'notNull') { + nullable = false; + } else if (methodName === 'unique') { + unique = true; + } else if (methodName === 'primaryKey') { + primaryKey = true; + nullable = false; + } else if (methodName.startsWith('default')) { + defaultValue = 
getExpressionText(this.sourceFile, current.arguments[0]); + } else if (methodName === 'references') { + references = getExpressionText(this.sourceFile, current.arguments[0]); + } + + if (ts.isPropertyAccessExpression(current.expression)) { + current = unwrapExpression(current.expression.expression); + continue; + } + + break; + } + + return { + name: columnName, + type, + nullable, + unique, + primaryKey, + defaultValue, + references, + }; + } +} diff --git a/betterbase/packages/cli/src/utils/schema-scanner.ts b/betterbase/packages/cli/src/utils/schema-scanner.ts new file mode 100644 index 0000000..ff8ea30 --- /dev/null +++ b/betterbase/packages/cli/src/utils/schema-scanner.ts @@ -0,0 +1,2 @@ +export { SchemaScanner } from './scanner'; +export type { ColumnInfo, TableInfo } from './scanner'; diff --git a/betterbase/packages/cli/test/context-generator.test.ts b/betterbase/packages/cli/test/context-generator.test.ts new file mode 100644 index 0000000..2499430 --- /dev/null +++ b/betterbase/packages/cli/test/context-generator.test.ts @@ -0,0 +1,107 @@ +import { mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { ContextGenerator } from '../src/utils/context-generator'; + +describe('ContextGenerator', () => { + test('creates .betterbase-context.json from schema and routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull(), + }); + `, + ); + + writeFileSync( + path.join(root, 'src/routes/index.ts'), + ` + import { Hono } from 
'hono'; + const app = new Hono(); + app.get('/health', (c) => c.json({ ok: true })); + export default app; + `, + ); + + const generator = new ContextGenerator(); + const context = await generator.generate(root); + + expect(context.tables.users).toBeDefined(); + expect(context.tables.users.columns.id).toBeDefined(); + expect(context.tables.users.columns.email).toBeDefined(); + expect(context.routes['/health']).toBeDefined(); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.tables.users.name).toBe('users'); + expect(file.tables.users.columns.id.type).toBe('text'); + expect(file.tables.users.columns.email.type).toBe('text'); + expect(Array.isArray(file.routes['/health'])).toBe(true); + expect(file.routes['/health'].length).toBeGreaterThan(0); + expect(file.routes['/health'][0].method).toBe('GET'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing routes directory with empty routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-routes-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { id: text('id').primaryKey() }); + `, + ); + + const context = await new ContextGenerator().generate(root); + expect(context.routes).toEqual({}); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.routes).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles empty schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-empty-schema-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 
'src/db/schema.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-schema-')); + + try { + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 'src/routes/index.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/route-scanner.test.ts b/betterbase/packages/cli/test/route-scanner.test.ts new file mode 100644 index 0000000..9f56991 --- /dev/null +++ b/betterbase/packages/cli/test/route-scanner.test.ts @@ -0,0 +1,45 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { RouteScanner } from '../src/utils/route-scanner'; + +describe('RouteScanner', () => { + test('extracts hono routes with auth and schemas', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-routes-')); + + try { + const routesDir = path.join(root, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + writeFileSync( + path.join(routesDir, 'users.ts'), + ` + import { Hono } from 'hono'; + import { z } from 'zod'; + import { authMiddleware } from '../middleware/auth'; + + const createUserSchema = z.object({ email: z.string().email() }); + export const users = new Hono(); + + users.get('/users', authMiddleware, (c) => c.json({ users: [] })); + users.post('/users', async (c) => { + const body = await c.req.json(); + createUserSchema.parse(body); + return c.json({ ok: true }); + }); + `, + ); + + const scanner = new 
RouteScanner(); + const routes = scanner.scan(routesDir); + + expect(routes['/users']).toBeDefined(); + expect(routes['/users'].length).toBe(2); + expect(routes['/users'][0].requiresAuth).toBe(true); + expect(routes['/users'][1].inputSchema).toBe('createUserSchema'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/scanner.test.ts b/betterbase/packages/cli/test/scanner.test.ts new file mode 100644 index 0000000..edf5594 --- /dev/null +++ b/betterbase/packages/cli/test/scanner.test.ts @@ -0,0 +1,61 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { SchemaScanner } from '../src/utils/scanner'; + +describe('SchemaScanner', () => { + test('extracts tables, columns, relations, and indexes from drizzle schema', () => { + const dir = mkdtempSync(path.join(tmpdir(), 'bb-scanner-')); + + try { + const schemaPath = path.join(dir, 'schema.ts'); + writeFileSync( + schemaPath, + ` + import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core'; + + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull().unique(), + age: integer('age').default(18), + }, (table) => ({ + usersEmailIdx: index('users_email_idx').on(table.email), + })); + + export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + title: text('title').notNull(), + }); + + export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + postId: text('post_id').notNull().references(() => posts.id), + body: text('body'), + }); + `, + ); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + expect(Object.keys(tables)).toEqual(['users', 'posts', 'comments']); + + expect(tables.users.name).toBe('users'); + 
expect(tables.users.columns.id.primaryKey).toBe(true); + expect(tables.users.columns.id.nullable).toBe(false); + expect(tables.users.columns.email.unique).toBe(true); + expect(tables.users.columns.age.defaultValue).toBe('18'); + expect(tables.users.indexes).toContain('usersEmailIdx'); + + expect(tables.posts.columns.userId.references).toBe('() => users.id'); + expect(tables.posts.relations).toContain('() => users.id'); + + expect(tables.comments.columns.postId.references).toBe('() => posts.id'); + expect(tables.comments.relations).toContain('() => posts.id'); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..5c49ed4 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,42 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + + + + test('registers generate crud command', () => { + const program = createProgram(); + const generate = program.commands.find((command) => command.name() === 'generate'); + expect(generate).toBeDefined(); + + const crud = generate?.commands.find((command) => command.name() === 'crud'); + expect(crud).toBeDefined(); + }); + + test('registers auth setup command', () => { + const program = createProgram(); + const auth = program.commands.find((command) => command.name() === 'auth'); + expect(auth).toBeDefined(); + + const setup = auth?.commands.find((command) => command.name() === 'setup'); + expect(setup).toBeDefined(); + }); + + test('registers dev command', () => { + const program = createProgram(); + const dev = program.commands.find((command) => command.name() === 'dev'); + expect(dev).toBeDefined(); + }); + + test('registers migrate commands', () => { const program = createProgram(); + const migrate = program.commands.find((command) => 
command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 
'local.db'; -const sqlite = new Database(dbPath, { create: true }); + +// env.DB_PATH is always present because env schema provides a default. +const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..359c1c5 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,5 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +7,8 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute 
} from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..22fff3d 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; import { HTTPException } from 'hono/http-exception'; -import { z } from 'zod'; +import { z, ZodError } from 'zod'; import { db } from '../db'; import { users } from '../db/schema'; import { parseBody } from '../middleware/validation'; @@ -10,11 +10,69 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + try { + const pagination = paginationSchema.parse({ + limit: c.req.query('limit'), + offset: c.req.query('offset'), + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, 
+ // No DB query is run for limit=0, so hasMore cannot be determined. + hasMore: null, + }, + }); + } + + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + if (error instanceof ZodError) { + return c.json( + { + error: 'Invalid pagination query parameters', + details: error.issues, + }, + 400, + ); + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..574a5e4 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,8 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationDir": "dist/types" } } From 5f5c40c48c8e74564d5d6d78903f053d60a76428 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 06:18:13 +0200 Subject: [PATCH 11/12] Add Bun WebSocket realtime server and CRUD broadcasts --- betterbase/.gitignore | 7 +- betterbase/README.md | 12 + betterbase/apps/cli/tsconfig.json | 11 +- betterbase/package.json | 4 +- betterbase/packages/cli/package.json | 6 +- betterbase/packages/cli/src/build.ts | 18 +- betterbase/packages/cli/src/commands/auth.ts | 238 ++++++++++++ betterbase/packages/cli/src/commands/dev.ts | 51 +++ .../packages/cli/src/commands/generate.ts | 286 ++++++++++++++ betterbase/packages/cli/src/commands/init.ts | 88 ++++- .../packages/cli/src/commands/migrate.ts | 350 +++++++++++++++++- betterbase/packages/cli/src/constants.ts | 1 + betterbase/packages/cli/src/index.ts | 56 ++- .../cli/src/utils/context-generator.ts | 84 +++++ 
.../packages/cli/src/utils/route-scanner.ts | 165 +++++++++ betterbase/packages/cli/src/utils/scanner.ts | 252 +++++++++++++ .../packages/cli/src/utils/schema-scanner.ts | 2 + .../cli/test/context-generator.test.ts | 107 ++++++ .../packages/cli/test/route-scanner.test.ts | 45 +++ betterbase/packages/cli/test/scanner.test.ts | 61 +++ betterbase/packages/cli/test/smoke.test.ts | 35 +- betterbase/templates/base/README.md | 12 + betterbase/templates/base/package.json | 4 +- betterbase/templates/base/src/db/index.ts | 6 +- betterbase/templates/base/src/index.ts | 17 +- betterbase/templates/base/src/lib/env.ts | 3 + betterbase/templates/base/src/lib/realtime.ts | 151 ++++++++ betterbase/templates/base/src/routes/index.ts | 5 +- betterbase/templates/base/src/routes/users.ts | 64 +++- betterbase/tsconfig.base.json | 4 +- 30 files changed, 2085 insertions(+), 60 deletions(-) create mode 100644 betterbase/packages/cli/src/commands/auth.ts create mode 100644 betterbase/packages/cli/src/commands/dev.ts create mode 100644 betterbase/packages/cli/src/commands/generate.ts create mode 100644 betterbase/packages/cli/src/constants.ts create mode 100644 betterbase/packages/cli/src/utils/context-generator.ts create mode 100644 betterbase/packages/cli/src/utils/route-scanner.ts create mode 100644 betterbase/packages/cli/src/utils/scanner.ts create mode 100644 betterbase/packages/cli/src/utils/schema-scanner.ts create mode 100644 betterbase/packages/cli/test/context-generator.test.ts create mode 100644 betterbase/packages/cli/test/route-scanner.test.ts create mode 100644 betterbase/packages/cli/test/scanner.test.ts create mode 100644 betterbase/templates/base/src/lib/realtime.ts diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..240b1f4 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,17 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo +.betterbase-context.json .vscode/ .idea/ .env .env.* -.env.local -.env.test 
!.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..ebc5905 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,18 @@ Initial BetterBase monorepo scaffold with a concrete base template. - Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` (runs `turbo run typecheck --filter '*'`) + +> Note: `templates/base` is not in the root workspace graph (`apps/*`, `packages/*`), so run template checks separately (e.g. `cd templates/base && bun run typecheck`). + ## Base Template Commands From `templates/base`: diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..0f4c3d8 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.2.14", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index 5b8543d..54f6fb8 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -16,11 +16,11 @@ "chalk": "^5.3.0", "commander": "^12.1.0", "inquirer": "^10.2.2", - "zod": 
"^3.23.8" + "zod": "^3.23.8", + "typescript": "^5.3.0" }, "devDependencies": { - "@types/bun": "^1.3.9", - "typescript": "^5.9.3" + "@types/bun": "^1.3.9" }, "exports": { ".": "./src/index.ts" diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..86937ef 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,10 +1,16 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. */ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, @@ -17,7 +23,7 @@ export async function buildStandaloneCli(): Promise { throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - const outputPath = './dist/index.js'; + const outputPath = path.join(outdir, 'index.js'); const compiled = await Bun.file(outputPath).text(); await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } @@ -26,11 +32,7 @@ async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/auth.ts b/betterbase/packages/cli/src/commands/auth.ts new file mode 100644 index 0000000..3dbc76e --- /dev/null +++ b/betterbase/packages/cli/src/commands/auth.ts @@ -0,0 +1,238 @@ +import { execSync } from 
'node:child_process'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import * as logger from '../utils/logger'; + +const AUTH_SCHEMA_BLOCK = ` +// Auth tables (generated by BetterAuth) +export const sessions = sqliteTable('sessions', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + expiresAt: integer('expires_at', { mode: 'timestamp' }).notNull(), + ipAddress: text('ip_address'), + userAgent: text('user_agent'), +}); + +export const accounts = sqliteTable('accounts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + provider: text('provider').notNull(), + providerAccountId: text('provider_account_id').notNull(), + accessToken: text('access_token'), + refreshToken: text('refresh_token'), + expiresAt: integer('expires_at', { mode: 'timestamp' }), +}); +`; + +const AUTH_ROUTE_FILE = `import { Hono } from 'hono'; +import { z } from 'zod'; +import { eq } from 'drizzle-orm'; +import { db } from '../db'; +import { users, sessions } from '../db/schema'; + +const authRoute = new Hono(); + +const loginSchema = z.object({ + email: z.string().email(), + password: z.string().min(8), +}); + +authRoute.post('/login', async (c) => { + const body = loginSchema.parse(await c.req.json()); + + const user = await db.select().from(users).where(eq(users.email, body.email)).limit(1); + if (user.length === 0) { + return c.json({ error: 'Invalid credentials' }, 401); + } + + const sessionId = crypto.randomUUID(); + const expiresAt = new Date(Date.now() + 30 * 24 * 60 * 60 * 1000); + + await db.insert(sessions).values({ + id: sessionId, + userId: user[0].id, + expiresAt, + ipAddress: c.req.header('cf-connecting-ip') || c.req.header('x-forwarded-for') || null, + userAgent: c.req.header('user-agent') || null, + }); + + return c.json({ + token: sessionId, + user: { + id: user[0].id, + email: user[0].email, + name: user[0].name, + }, + 
}); +}); + +authRoute.post('/logout', async (c) => { + const token = c.req.header('Authorization')?.split(' ')[1]; + if (token) { + await db.delete(sessions).where(eq(sessions.id, token)); + } + + return c.json({ message: 'Logged out' }); +}); + +export { authRoute }; +`; + +const AUTH_MIDDLEWARE_FILE = `import { and, eq, gt } from 'drizzle-orm'; +import { createMiddleware } from 'hono/factory'; +import { db } from '../db'; +import { sessions, users } from '../db/schema'; + +export interface AuthContext { + user: { + id: string; + email: string; + name: string | null; + }; +} + +function getSessionToken(authHeader: string | undefined): string | null { + if (!authHeader) return null; + + const parts = authHeader.split(' '); + if (parts.length !== 2 || parts[0] !== 'Bearer') return null; + + return parts[1]; +} + +async function validateSession(token: string): Promise { + const session = await db + .select() + .from(sessions) + .where(and(eq(sessions.id, token), gt(sessions.expiresAt, new Date()))) + .limit(1); + + if (session.length === 0) return null; + + const user = await db.select().from(users).where(eq(users.id, session[0].userId)).limit(1); + return user.length > 0 ? 
user[0] : null; +} + +export const requireAuth = createMiddleware<{ Variables: AuthContext }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (!token) { + return c.json({ error: 'Unauthorized: No token provided' }, 401); + } + + const user = await validateSession(token); + if (!user) { + return c.json({ error: 'Unauthorized: Invalid or expired token' }, 401); + } + + c.set('user', user); + await next(); +}); + +export const optionalAuth = createMiddleware<{ Variables: Partial }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (token) { + const user = await validateSession(token); + if (user) { + c.set('user', user); + } + } + + await next(); +}); + +export function getUser(c: { get: (key: 'user') => AuthContext['user'] }): AuthContext['user'] { + return c.get('user'); +} +`; + +function appendIfMissing(filePath: string, marker: string, content: string): void { + const current = readFileSync(filePath, 'utf-8'); + if (current.includes(marker)) { + return; + } + + const next = current.trimEnd() + '\n\n' + content.trim() + '\n'; + writeFileSync(filePath, next); +} + +function ensureAuthInConfig(projectRoot: string): void { + const configPath = path.join(projectRoot, 'betterbase.config.ts'); + if (!existsSync(configPath)) return; + + const current = readFileSync(configPath, 'utf-8'); + if (current.includes('auth: {')) return; + + const updated = current.replace( + 'export default {', + `export default {\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + ); + + writeFileSync(configPath, updated); +} + +function ensureEnvVar(projectRoot: string): void { + const envPath = path.join(projectRoot, '.env.example'); + if (!existsSync(envPath)) return; + + const env = readFileSync(envPath, 'utf-8'); + if (env.includes('AUTH_SECRET=')) return; + + writeFileSync(envPath, `${env.trimEnd()}\n\n# 
Auth\nAUTH_SECRET=your-secret-key-here-change-in-production\n`); +} + +function ensureRoutesIndexHook(projectRoot: string): void { + const routesIndexPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routesIndexPath)) return; + + let current = readFileSync(routesIndexPath, 'utf-8'); + + if (!current.includes("import { authRoute } from './auth';")) { + current = current.replace("import { usersRoute } from './users';", "import { usersRoute } from './users';\nimport { authRoute } from './auth';"); + } + + if (!current.includes("app.route('/auth', authRoute);")) { + current = current.replace("app.route('/api/users', usersRoute);", "app.route('/api/users', usersRoute);\n app.route('/auth', authRoute);"); + } + + writeFileSync(routesIndexPath, current); +} + +export async function runAuthSetupCommand(projectRoot: string = process.cwd()): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + const middlewarePath = path.join(resolvedRoot, 'src/middleware/auth.ts'); + const routePath = path.join(resolvedRoot, 'src/routes/auth.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Could not find schema file at ${schemaPath}. 
Run this command from a BetterBase app root.`); + } + + logger.info('šŸ” Setting up authentication...'); + + logger.info('šŸ“¦ Installing better-auth...'); + execSync('bun add better-auth', { cwd: resolvedRoot, stdio: 'inherit' }); + + logger.info('šŸ“ Adding auth tables to schema...'); + appendIfMissing(schemaPath, "export const sessions = sqliteTable('sessions'", AUTH_SCHEMA_BLOCK); + + logger.info('šŸ›”ļø Creating auth middleware...'); + mkdirSync(path.dirname(middlewarePath), { recursive: true }); + writeFileSync(middlewarePath, AUTH_MIDDLEWARE_FILE); + + logger.info('🧭 Creating auth routes...'); + mkdirSync(path.dirname(routePath), { recursive: true }); + writeFileSync(routePath, AUTH_ROUTE_FILE); + ensureRoutesIndexHook(resolvedRoot); + + logger.info('āš™ļø Updating config...'); + ensureAuthInConfig(resolvedRoot); + ensureEnvVar(resolvedRoot); + + logger.success('Authentication setup complete!'); + console.log('\nNext steps:'); + console.log(' 1. Set AUTH_SECRET in .env'); + console.log(' 2. Run: bun run db:push'); + console.log(' 3. 
Use requireAuth middleware on protected routes'); +} diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts new file mode 100644 index 0000000..3720944 --- /dev/null +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -0,0 +1,51 @@ +import { existsSync, watch } from 'node:fs'; +import path from 'node:path'; +import { ContextGenerator } from '../utils/context-generator'; +import * as logger from '../utils/logger'; + +export async function runDevCommand(projectRoot: string = process.cwd()): Promise { + const generator = new ContextGenerator(); + + await generator.generate(projectRoot); + + const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; + const timers = new Map>(); + + for (const watchPath of watchPaths) { + if (!existsSync(watchPath)) { + logger.warn(`Watch path does not exist; skipping: ${watchPath}`); + continue; + } + + try { + watch(watchPath, { recursive: true }, (_eventType, filename) => { + console.log(`šŸ“ File changed: ${String(filename ?? '')}`); + + const existing = timers.get(watchPath); + if (existing) { + clearTimeout(existing); + } + + const timer = setTimeout(async () => { + console.log('šŸ”„ Regenerating context...'); + const start = Date.now(); + + try { + await generator.generate(projectRoot); + console.log(`āœ… Context updated in ${Date.now() - start}ms`); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.error(`āŒ Failed to regenerate context: ${message}`); + } + }, 250); + + timers.set(watchPath, timer); + }); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logger.warn(`Failed to watch path ${watchPath}: ${message}`); + } + } + + console.log('šŸ‘€ Watching for schema and route changes...'); +} diff --git a/betterbase/packages/cli/src/commands/generate.ts b/betterbase/packages/cli/src/commands/generate.ts new file mode 100644 index 0000000..3035c08 --- /dev/null +++ b/betterbase/packages/cli/src/commands/generate.ts @@ -0,0 +1,286 @@ +import { execSync } from 'node:child_process'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { SchemaScanner, type TableInfo } from '../utils/schema-scanner'; +import * as logger from '../utils/logger'; + + +const REALTIME_FILE = `import type { ServerWebSocket } from 'bun'; + +interface Client { + ws: ServerWebSocket; + subscriptions: Set; +} + +class RealtimeServer { + private clients = new Map, Client>(); + private tableSubscribers = new Map>>(); + + handleConnection(ws: ServerWebSocket): void { + this.clients.set(ws, { ws, subscriptions: new Set() }); + } + + handleMessage(ws: ServerWebSocket, rawMessage: string): void { + try { + const data = JSON.parse(rawMessage) as { type?: string; table?: string }; + if (!data.type || !data.table) return; + + if (data.type === 'subscribe') { + if (!this.tableSubscribers.has(data.table)) this.tableSubscribers.set(data.table, new Set()); + this.tableSubscribers.get(data.table)?.add(ws); + } + + if (data.type === 'unsubscribe') { + this.tableSubscribers.get(data.table)?.delete(ws); + } + } catch { + // ignore malformed payloads + } + } + + handleClose(ws: ServerWebSocket): void { + this.clients.delete(ws); + for (const subscribers of this.tableSubscribers.values()) { + subscribers.delete(ws); + } + } + + broadcast(table: string, event: 'INSERT' | 'UPDATE' | 'DELETE', data: unknown): void { + const subscribers = this.tableSubscribers.get(table); + if (!subscribers) return; + + const payload = JSON.stringify({ type: 'update', table, event, data, 
timestamp: new Date().toISOString() }); + for (const ws of subscribers) { + ws.send(payload); + } + } +} + +export const realtime = new RealtimeServer(); +`; + +function ensureRealtimeUtility(projectRoot: string): void { + const realtimePath = path.join(projectRoot, 'src/lib/realtime.ts'); + if (existsSync(realtimePath)) { + return; + } + + mkdirSync(path.dirname(realtimePath), { recursive: true }); + writeFileSync(realtimePath, REALTIME_FILE); +} + + +function toSingular(name: string): string { + return name.endsWith('s') ? name.slice(0, -1) : `${name}Item`; +} + +function schemaTypeToZod(type: string): string { + if (type === 'integer' || type === 'number') { + return 'z.coerce.number()'; + } + + if (type === 'boolean') { + return 'z.coerce.boolean()'; + } + + if (type === 'json') { + return 'z.unknown()'; + } + + if (type === 'datetime') { + return 'z.coerce.date()'; + } + + return 'z.string()'; +} + +function buildSchemaShape(table: TableInfo, mode: 'create' | 'update'): string { + const entries = Object.entries(table.columns) + .filter(([columnName, column]) => !(column.primaryKey || columnName === 'id')) + .map(([columnName, column]) => { + const base = schemaTypeToZod(column.type); + const optional = mode === 'update' || column.nullable || Boolean(column.defaultValue); + return ` ${columnName}: ${optional ? 
`${base}.optional()` : base}`; + }); + + return entries.join(',\n'); +} + +function generateRouteFile(tableName: string, table: TableInfo): string { + const singular = toSingular(tableName); + const createShape = buildSchemaShape(table, 'create'); + const updateShape = buildSchemaShape(table, 'update'); + + return `import { and, asc, desc, eq } from 'drizzle-orm'; +import { Hono } from 'hono'; +import { zValidator } from '@hono/zod-validator'; +import { z } from 'zod'; +import { db } from '../db'; +import { realtime } from '../lib/realtime'; +import { ${tableName} } from '../db/schema'; + +export const ${tableName}Route = new Hono(); + +const createSchema = z.object({ +${createShape} +}); + +const updateSchema = z.object({ +${updateShape} +}); + +${tableName}Route.get('/', async (c) => { + const limit = Number(c.req.query('limit') ?? 50); + const offset = Number(c.req.query('offset') ?? 0); + const safeLimit = Number.isFinite(limit) && limit >= 0 ? Math.min(limit, 100) : 50; + const safeOffset = Number.isFinite(offset) && offset >= 0 ? offset : 0; + + const queryParams = c.req.query(); + const sort = queryParams.sort; + + const filters = Object.entries(queryParams).filter(([key, value]) => { + return key !== 'limit' && key !== 'offset' && key !== 'sort' && value !== undefined; + }); + + let query = db.select().from(${tableName}).$dynamic(); + + if (filters.length > 0) { + const conditions = filters + .filter(([key]) => key in ${tableName}) + .map(([key, value]) => eq(${tableName}[key as keyof typeof ${tableName}] as never, value as never)); + + if (conditions.length > 0) { + query = query.where(and(...conditions)); + } + } + + if (sort) { + const [field, order] = sort.split(':'); + if (field && field in ${tableName}) { + const column = ${tableName}[field as keyof typeof ${tableName}] as never; + query = query.orderBy(order === 'desc' ? 
desc(column) : asc(column)); + } + } + + const items = await query.limit(safeLimit).offset(safeOffset); + return c.json({ ${tableName}: items, count: items.length, pagination: { limit: safeLimit, offset: safeOffset } }); +}); + +${tableName}Route.get('/:id', async (c) => { + const id = c.req.param('id'); + const item = await db.select().from(${tableName}).where(eq(${tableName}.id, id as never)).limit(1); + + if (item.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + return c.json({ ${singular}: item[0] }); +}); + +${tableName}Route.post('/', zValidator('json', createSchema), async (c) => { + const body = c.req.valid('json'); + const created = await db.insert(${tableName}).values(body).returning(); + realtime.broadcast('${tableName}', 'INSERT', created[0]); + return c.json({ ${singular}: created[0] }, 201); +}); + +${tableName}Route.patch('/:id', zValidator('json', updateSchema), async (c) => { + const id = c.req.param('id'); + const body = c.req.valid('json'); + + const updated = await db.update(${tableName}).set(body).where(eq(${tableName}.id, id as never)).returning(); + if (updated.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + realtime.broadcast('${tableName}', 'UPDATE', updated[0]); + return c.json({ ${singular}: updated[0] }); +}); + +${tableName}Route.delete('/:id', async (c) => { + const id = c.req.param('id'); + const deleted = await db.delete(${tableName}).where(eq(${tableName}.id, id as never)).returning(); + + if (deleted.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + realtime.broadcast('${tableName}', 'DELETE', { id }); + return c.json({ message: '${singular} deleted', ${singular}: deleted[0] }); +}); +`; +} + +function updateMainRouter(projectRoot: string, tableName: string): void { + const routerPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routerPath)) { + logger.warn(`Routes index not found at ${routerPath}. 
Please wire the route manually.`); + return; + } + + let router = readFileSync(routerPath, 'utf-8'); + const importLine = `import { ${tableName}Route } from './${tableName}';`; + const routeLine = ` app.route('/api/${tableName}', ${tableName}Route);`; + + if (!router.includes(importLine)) { + const firstRouteImport = /import\s+\{\s*healthRoute\s*\}\s+from\s+'\.\/health';/; + if (firstRouteImport.test(router)) { + router = router.replace(firstRouteImport, (m) => `${m}\n${importLine}`); + } else { + router = `${importLine}\n${router}`; + } + } + + if (!router.includes(routeLine)) { + const routeStatements = [...router.matchAll(/\s*app\.route\([^\n]+\);/g)]; + if (routeStatements.length > 0) { + const last = routeStatements[routeStatements.length - 1]; + const insertAt = (last.index ?? 0) + last[0].length; + router = `${router.slice(0, insertAt)}\n${routeLine}${router.slice(insertAt)}`; + } else { + router = router.replace(/\n}\s*$/, `\n${routeLine}\n}`); + } + } + + writeFileSync(routerPath, router); +} + +export async function runGenerateCrudCommand(projectRoot: string, tableName: string): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Schema file not found at ${schemaPath}`); + } + + logger.info(`šŸ”Ø Generating CRUD for ${tableName}...`); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + const table = tables[tableName]; + if (!table) { + throw new Error(`Table "${tableName}" not found in schema.`); + } + + logger.info('šŸ“¦ Installing @hono/zod-validator...'); + execSync('bun add @hono/zod-validator', { cwd: resolvedRoot, stdio: 'inherit' }); + + ensureRealtimeUtility(resolvedRoot); + + const routesDir = path.join(resolvedRoot, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + const routePath = path.join(routesDir, `${tableName}.ts`); + writeFileSync(routePath, 
generateRouteFile(tableName, table)); + + updateMainRouter(resolvedRoot, tableName); + + logger.success(`Generated ${routePath}`); + console.log('\nEndpoints created:'); + console.log(` GET /api/${tableName}`); + console.log(` GET /api/${tableName}/:id`); + console.log(` POST /api/${tableName}`); + console.log(` PATCH /api/${tableName}/:id`); + console.log(` DELETE /api/${tableName}/:id`); +} diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..abe5481 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -338,19 +338,20 @@ import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; import { healthRoute } from './health'; import { usersRoute } from './users'; +import { env } from '../lib/env'; -export default function registerRoutes(app: Hono): void { +export function registerRoutes(app: Hono): void { app.use('*', cors()); app.use('*', logger()); app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, @@ -374,6 +375,19 @@ async function writeProjectFiles( await mkdir(path.join(projectPath, 'src/middleware'), { recursive: true }); await mkdir(path.join(projectPath, 'src/lib'), { recursive: true }); + + await writeFile( + path.join(projectPath, 'src/lib/env.ts'), + `import { z } from 'zod'; + +const envSchema = z.object({ + NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), +}); + +export const env = envSchema.parse(process.env); +`, + ); + await writeFile( path.join(projectPath, 'betterbase.config.ts'), `export default { @@ -427,7 +441,6 @@ bun.lockb .env.* !.env.example local.db -.drizzle `, ); @@ -447,7 +460,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -471,9 +484,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -510,9 +523,60 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = 
Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { @@ -545,7 +609,7 @@ usersRoute.post('/', async (c) => { await writeFile( path.join(projectPath, 'src/index.ts'), `import { Hono } from 'hono'; -import registerRoutes from './routes'; +import { registerRoutes } from './routes'; const app = new Hono(); registerRoutes(app); diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..8999867 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,352 @@ +import { Database } from 'bun:sqlite'; +import chalk from 'chalk'; +import { access, mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; -/** - * Run the `bb migrate` command. 
- */ +export type MigrationChangeType = + | 'create_table' + | 'add_column' + | 'modify_column' + | 'drop_column' + | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; + +async function runDrizzleKit(args: string[]): Promise { + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { + success: exitCode === 0, + stdout, + stderr, + }; +} + +async function listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + try { + await access(dir); + } catch { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); + continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) { + continue; + } + + const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + if (createTable) { + changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + continue; + } + + const dropTable = 
sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + if (dropTable) { + changes.push({ type: 'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + if (addColumn) { + changes.push({ + type: 'add_column', + table: addColumn[1], + column: addColumn[2], + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: dropColumn[1], + column: dropColumn[2], + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + /alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: alterColumn[1], + column: alterColumn[3] ?? alterColumn[4], + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), + detail: sql, + }); + continue; + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length > 0) { + console.log(chalk.green('āœ… New Tables:')); + for (const change of newTables) { + console.log(chalk.green(` + ${change.table}`)); + } + console.log(''); + } + + if (newColumns.length > 0) { + console.log(chalk.green('āœ… New Columns:')); + for (const change of newColumns) { + console.log(chalk.green(` + ${change.table}.${change.column 
?? ''}`)); + } + console.log(''); + } + + if (modified.length > 0) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + for (const change of modified) { + console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); + } + console.log(''); + } + + if (destructive.length > 0) { + console.log(chalk.red('āŒ Destructive Changes:')); + for (const change of destructive) { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + } + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + if (destructive.length === 0) { + return true; + } + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + for (const change of destructive) { + console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`); + } + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; + + try { + await access(sourcePath); + } catch { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + + const db = new Database(sourcePath, { readonly: true }); + try { + const snapshot = db.serialize(); + await Bun.write(backupPath, snapshot); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) { + return; + } + + const bytes = await Bun.file(backup.backupPath).bytes(); + await Bun.write(backup.sourcePath, bytes); + + logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. 
Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) { + continue; + } + + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. 
(Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ + message: 'Apply migrations to production now?', + initial: false, + }); + + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + + const confirmed = await confirmDestructive(changes); + if (!confirmed) { + return; + } + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..8daff7f --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1 @@ +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..25e8e04 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -1,6 +1,9 @@ import { Command, CommanderError } from 'commander'; import { runInitCommand } from './commands/init'; +import { runDevCommand } from './commands/dev'; import { runMigrateCommand } from './commands/migrate'; +import { runAuthSetupCommand } from './commands/auth'; +import { runGenerateCrudCommand } from './commands/generate'; import * as logger from './utils/logger'; import 
packageJson from '../package.json'; @@ -24,12 +27,57 @@ export function createProgram(): Command { await runInitCommand({ projectName }); }); + + program + .command('dev') + .description('Watch schema/routes and regenerate .betterbase-context.json') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runDevCommand(projectRoot); + }); + + + const auth = program.command('auth').description('Authentication helpers'); + + auth + .command('setup') + .description('Install and scaffold BetterAuth integration') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runAuthSetupCommand(projectRoot); + }); + + + const generate = program.command('generate').description('Code generation helpers'); + + generate + .command('crud') + .description('Generate full CRUD routes for a table') + .argument('', 'table name from src/db/schema.ts') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (tableName: string, projectRoot: string) => { + await runGenerateCrudCommand(projectRoot, tableName); + }); + program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + .description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git 
a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts new file mode 100644 index 0000000..aabf0f4 --- /dev/null +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -0,0 +1,84 @@ +import { existsSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { RouteScanner, type RouteInfo } from './route-scanner'; +import { SchemaScanner, type TableInfo } from './schema-scanner'; +import * as logger from './logger'; + +export interface BetterBaseContext { + version: string; + generated_at: string; + tables: Record; + routes: Record; + ai_prompt: string; +} + +export class ContextGenerator { + async generate(projectRoot: string): Promise { + const schemaPath = path.join(projectRoot, 'src/db/schema.ts'); + const routesPath = path.join(projectRoot, 'src/routes'); + + let tables: Record = {}; + let routes: Record = {}; + + if (existsSync(schemaPath)) { + const schemaScanner = new SchemaScanner(schemaPath); + tables = schemaScanner.scan(); + } else { + logger.warn(`Schema file not found; continuing with empty tables: ${schemaPath}`); + } + + if (existsSync(routesPath)) { + const routeScanner = new RouteScanner(); + routes = routeScanner.scan(routesPath); + } else { + logger.warn(`Routes directory not found; continuing with empty routes: ${routesPath}`); + } + + const context: BetterBaseContext = { + version: '1.0.0', + generated_at: new Date().toISOString(), + tables, + routes, + ai_prompt: this.generateAIPrompt(tables, routes), + }; + + const outputPath = path.join(projectRoot, '.betterbase-context.json'); + writeFileSync(outputPath, `${JSON.stringify(context, null, 2)}\n`); + console.log(`āœ… Generated ${outputPath}`); + + return context; + } + + private generateAIPrompt(tables: Record, routes: Record): string { + const tableNames = Object.keys(tables); + const routeCount = Object.values(routes).reduce((count, methods) => count + methods.length, 0); + + let prompt = `This is a 
BetterBase backend project with ${tableNames.length} tables and ${routeCount} API endpoints.\n\n`; + + prompt += 'DATABASE SCHEMA:\n'; + for (const tableName of tableNames) { + const table = tables[tableName]; + const columns = Object.keys(table.columns ?? {}).join(', '); + prompt += `- ${tableName}: ${columns}\n`; + if (table.relations.length > 0) { + prompt += ` Relations: ${table.relations.join(', ')}\n`; + } + } + + prompt += '\nAPI ENDPOINTS:\n'; + for (const [routePath, methods] of Object.entries(routes)) { + for (const route of methods) { + const auth = route.requiresAuth ? ' [AUTH REQUIRED]' : ''; + prompt += `- ${route.method} ${routePath}${auth}\n`; + } + } + + prompt += '\nWhen writing code for this project:\n'; + prompt += "1. Always import tables from './src/db/schema'\n"; + prompt += '2. Use Drizzle ORM for database queries\n'; + prompt += '3. Validate inputs with Zod\n'; + prompt += '4. Return JSON responses with proper status codes\n'; + + return prompt; + } +} diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts new file mode 100644 index 0000000..5fc7aa9 --- /dev/null +++ b/betterbase/packages/cli/src/utils/route-scanner.ts @@ -0,0 +1,165 @@ +import { readdirSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import * as ts from 'typescript'; + +export interface RouteInfo { + method: string; + path: string; + requiresAuth: boolean; + inputSchema?: string; + outputSchema?: string; +} + +function getStringLiteral(node: ts.Node | undefined): string { + if (!node) return ''; + if (ts.isStringLiteral(node) || ts.isNoSubstitutionTemplateLiteral(node)) { + return node.text; + } + return node.getText(); +} + +function isAuthLikeName(value: string): boolean { + return /\bauth\b/i.test(value) || /^auth/i.test(value) || /^(authMiddleware|requireAuth)$/i.test(value); +} + +function collectTsFiles(dir: string): string[] { + const files: string[] = []; + + const walk = (current: 
string): void => { + let entries: ReturnType; + try { + entries = readdirSync(current, { withFileTypes: true }); + } catch { + return; + } + + for (const entry of entries) { + const fullPath = path.join(current, entry.name); + if (entry.isDirectory()) { + walk(fullPath); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.ts') && !entry.name.endsWith('.d.ts')) { + files.push(fullPath); + } + } + }; + + walk(dir); + return files; +} + +export class RouteScanner { + scan(routesDir: string): Record { + const files = collectTsFiles(routesDir); + const routes: Record = {}; + + for (const file of files) { + const fileRoutes = this.scanFile(file); + for (const [routePath, entries] of Object.entries(fileRoutes)) { + routes[routePath] = [...(routes[routePath] ?? []), ...entries]; + } + } + + return routes; + } + + private scanFile(filePath: string): Record { + const sourceCode = readFileSync(filePath, 'utf-8'); + const sourceFile = ts.createSourceFile(filePath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + + const routes: Record = {}; + const authIdentifiers = new Set(); + + const isAuthMiddlewareExpression = (expr: ts.Expression): boolean => { + if (ts.isIdentifier(expr)) { + return authIdentifiers.has(expr.text) || isAuthLikeName(expr.text); + } + + if (ts.isPropertyAccessExpression(expr)) { + const text = expr.getText(sourceFile); + return isAuthLikeName(text); + } + + return false; + }; + + const collectAuthIdentifiers = (node: ts.Node): void => { + if (!ts.isVariableStatement(node)) return; + + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) continue; + const initializer = declaration.initializer; + if (ts.isCallExpression(initializer) && ts.isIdentifier(initializer.expression)) { + if (initializer.expression.text === 'createMiddleware' || initializer.expression.text === 'requireAuth') { + authIdentifiers.add(declaration.name.text); + } + } + + if 
(isAuthLikeName(declaration.name.text)) { + authIdentifiers.add(declaration.name.text); + } + } + }; + + ts.forEachChild(sourceFile, collectAuthIdentifiers); + + const visit = (node: ts.Node): void => { + if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) { + const method = node.expression.name.text.toLowerCase(); + const httpMethods = new Set(['get', 'post', 'put', 'patch', 'delete', 'options', 'head']); + + if (httpMethods.has(method)) { + const [pathArg, ...handlerArgs] = node.arguments; + const routePath = getStringLiteral(pathArg); + + let requiresAuth = false; + for (const arg of handlerArgs) { + if (isAuthMiddlewareExpression(arg)) { + requiresAuth = true; + break; + } + } + + const route: RouteInfo = { + method: method.toUpperCase(), + path: routePath, + requiresAuth, + inputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'input'), + outputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'output'), + }; + + if (!routes[routePath]) { + routes[routePath] = []; + } + + routes[routePath].push(route); + } + } + + ts.forEachChild(node, visit); + }; + + visit(sourceFile); + return routes; + } + + private findSchemaUsage(sourceFile: ts.SourceFile, args: ts.NodeArray, mode: 'input' | 'output'): string | undefined { + const text = args.map((arg) => arg.getText(sourceFile)).join('\n'); + + if (mode === 'input') { + const parseMatch = text.match(/([A-Za-z0-9_]+Schema)\.(safeParse|parse)\(/); + if (parseMatch) return parseMatch[1]; + const middlewareMatch = text.match(/parseBody\(([^,]+),/); + if (middlewareMatch) return middlewareMatch[1].trim(); + } + + if (mode === 'output') { + const outputMatch = text.match(/([A-Za-z0-9_]+Schema)\.(parse|safeParse)\([^)]*c\.json/); + if (outputMatch) return outputMatch[1]; + } + + return undefined; + } +} diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts new file mode 100644 index 0000000..0dee86d --- /dev/null +++ 
b/betterbase/packages/cli/src/utils/scanner.ts @@ -0,0 +1,252 @@ +import { readFileSync } from 'node:fs'; +import * as ts from 'typescript'; + +export interface ColumnInfo { + name: string; + type: string; + nullable: boolean; + unique: boolean; + primaryKey: boolean; + defaultValue?: string; + references?: string; +} + +export interface TableInfo { + name: string; + columns: Record; + relations: string[]; + indexes: string[]; +} + +function unwrapExpression(expression: ts.Expression): ts.Expression { + let current = expression; + + while ( + ts.isParenthesizedExpression(current) || + ts.isAsExpression(current) || + ts.isTypeAssertionExpression(current) || + ts.isSatisfiesExpression(current) + ) { + current = (current as ts.ParenthesizedExpression | ts.AsExpression | ts.TypeAssertion | ts.SatisfiesExpression) + .expression; + } + + return current; +} + +function getCallName(call: ts.CallExpression): string { + if (ts.isIdentifier(call.expression)) { + return call.expression.text; + } + + if (ts.isPropertyAccessExpression(call.expression)) { + return call.expression.name.text; + } + + return ''; +} + +function getExpressionText(sourceFile: ts.SourceFile, node: ts.Node | undefined): string { + if (!node) { + return ''; + } + + return node.getText(sourceFile); +} + +export class SchemaScanner { + private readonly sourceFile: ts.SourceFile; + + constructor(schemaPath: string) { + let sourceCode: string; + + try { + sourceCode = readFileSync(schemaPath, 'utf-8'); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Failed to read schema file at ${schemaPath}: ${message}`); + } + + this.sourceFile = ts.createSourceFile(schemaPath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + } + + scan(): Record { + const tables: Record = {}; + + const visit = (node: ts.Node): void => { + if (ts.isVariableStatement(node)) { + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) { + continue; + } + + const initializer = unwrapExpression(declaration.initializer); + if (!ts.isCallExpression(initializer)) { + continue; + } + + const functionName = getCallName(initializer); + if (functionName === 'sqliteTable' || functionName === 'pgTable' || functionName === 'mysqlTable') { + tables[declaration.name.text] = this.parseTable(initializer); + } + } + } + + ts.forEachChild(node, visit); + }; + + visit(this.sourceFile); + return tables; + } + + private parseTable(callExpression: ts.CallExpression): TableInfo { + const [nameArg, columnsArg, indexesArg] = callExpression.arguments; + const tableName = ts.isStringLiteral(nameArg) ? nameArg.text : getExpressionText(this.sourceFile, nameArg); + + const columns: Record = {}; + const relations: string[] = []; + + if (columnsArg && ts.isObjectLiteralExpression(columnsArg)) { + for (const property of columnsArg.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const columnName = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + + const columnInfo = this.parseColumn(columnName, property.initializer); + columns[columnName] = columnInfo; + + if (columnInfo.references) { + relations.push(columnInfo.references); + } + } + } + + const indexes = this.parseIndexes(indexesArg); + + return { + name: tableName, + columns, + relations, + indexes, + }; + } + + private parseIndexes(indexesArg: ts.Expression | undefined): string[] { + if (!indexesArg) { + return []; + } + + const indexes: string[] = []; + const indexRoot = unwrapExpression(indexesArg); + + const collectFromObject = (obj: ts.ObjectLiteralExpression): void => { + for (const property of obj.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const value = unwrapExpression(property.initializer); + if (!ts.isCallExpression(value)) { + continue; + } + + const callName = getCallName(value); + if (callName === 'index' || callName === 'uniqueIndex') { + const key = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + indexes.push(key); + } + } + }; + + if (ts.isArrowFunction(indexRoot) || ts.isFunctionExpression(indexRoot)) { + const body = indexRoot.body; + if (ts.isObjectLiteralExpression(body)) { + collectFromObject(body); + } + + if (ts.isBlock(body)) { + for (const statement of body.statements) { + if (!ts.isReturnStatement(statement) || !statement.expression) { + continue; + } + + const expression = unwrapExpression(statement.expression); + if (ts.isObjectLiteralExpression(expression)) { + collectFromObject(expression); + } + } + } + } + + return indexes; + } + + private parseColumn(columnName: string, expression: ts.Expression): ColumnInfo { + let type = 'unknown'; + let nullable = true; + let unique = false; + let primaryKey = false; + let defaultValue: string | undefined; + let references: string | undefined; + + let current = unwrapExpression(expression); + + while (ts.isCallExpression(current)) { + const methodName = getCallName(current); + + if (methodName === 'text' || methodName === 'varchar' || methodName === 'char') { + type = 'text'; + } else if (methodName === 'integer' || methodName === 'int' || methodName === 'bigint' || methodName === 'serial') { + type = 'integer'; + } else if (methodName === 'real' || methodName === 'numeric' || methodName === 'decimal' || methodName === 'doublePrecision') { + type = 'number'; + } else if (methodName === 'boolean') { + type = 'boolean'; + } else if (methodName === 'timestamp' || methodName === 'datetime') { + type = 'datetime'; + } else if (methodName === 'json' || methodName === 'jsonb') { + type = 'json'; + } else if (methodName === 'blob') { + type = 'blob'; + } else if (methodName === 'notNull') { + nullable = false; + } else if (methodName === 'unique') { + unique = true; + } else if (methodName === 'primaryKey') { + primaryKey = true; + nullable = false; + } else if (methodName.startsWith('default')) { + defaultValue = 
getExpressionText(this.sourceFile, current.arguments[0]); + } else if (methodName === 'references') { + references = getExpressionText(this.sourceFile, current.arguments[0]); + } + + if (ts.isPropertyAccessExpression(current.expression)) { + current = unwrapExpression(current.expression.expression); + continue; + } + + break; + } + + return { + name: columnName, + type, + nullable, + unique, + primaryKey, + defaultValue, + references, + }; + } +} diff --git a/betterbase/packages/cli/src/utils/schema-scanner.ts b/betterbase/packages/cli/src/utils/schema-scanner.ts new file mode 100644 index 0000000..ff8ea30 --- /dev/null +++ b/betterbase/packages/cli/src/utils/schema-scanner.ts @@ -0,0 +1,2 @@ +export { SchemaScanner } from './scanner'; +export type { ColumnInfo, TableInfo } from './scanner'; diff --git a/betterbase/packages/cli/test/context-generator.test.ts b/betterbase/packages/cli/test/context-generator.test.ts new file mode 100644 index 0000000..2499430 --- /dev/null +++ b/betterbase/packages/cli/test/context-generator.test.ts @@ -0,0 +1,107 @@ +import { mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { ContextGenerator } from '../src/utils/context-generator'; + +describe('ContextGenerator', () => { + test('creates .betterbase-context.json from schema and routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull(), + }); + `, + ); + + writeFileSync( + path.join(root, 'src/routes/index.ts'), + ` + import { Hono } from 
'hono'; + const app = new Hono(); + app.get('/health', (c) => c.json({ ok: true })); + export default app; + `, + ); + + const generator = new ContextGenerator(); + const context = await generator.generate(root); + + expect(context.tables.users).toBeDefined(); + expect(context.tables.users.columns.id).toBeDefined(); + expect(context.tables.users.columns.email).toBeDefined(); + expect(context.routes['/health']).toBeDefined(); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.tables.users.name).toBe('users'); + expect(file.tables.users.columns.id.type).toBe('text'); + expect(file.tables.users.columns.email.type).toBe('text'); + expect(Array.isArray(file.routes['/health'])).toBe(true); + expect(file.routes['/health'].length).toBeGreaterThan(0); + expect(file.routes['/health'][0].method).toBe('GET'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing routes directory with empty routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-routes-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { id: text('id').primaryKey() }); + `, + ); + + const context = await new ContextGenerator().generate(root); + expect(context.routes).toEqual({}); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.routes).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles empty schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-empty-schema-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 
'src/db/schema.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-schema-')); + + try { + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 'src/routes/index.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/route-scanner.test.ts b/betterbase/packages/cli/test/route-scanner.test.ts new file mode 100644 index 0000000..9f56991 --- /dev/null +++ b/betterbase/packages/cli/test/route-scanner.test.ts @@ -0,0 +1,45 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { RouteScanner } from '../src/utils/route-scanner'; + +describe('RouteScanner', () => { + test('extracts hono routes with auth and schemas', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-routes-')); + + try { + const routesDir = path.join(root, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + writeFileSync( + path.join(routesDir, 'users.ts'), + ` + import { Hono } from 'hono'; + import { z } from 'zod'; + import { authMiddleware } from '../middleware/auth'; + + const createUserSchema = z.object({ email: z.string().email() }); + export const users = new Hono(); + + users.get('/users', authMiddleware, (c) => c.json({ users: [] })); + users.post('/users', async (c) => { + const body = await c.req.json(); + createUserSchema.parse(body); + return c.json({ ok: true }); + }); + `, + ); + + const scanner = new 
RouteScanner(); + const routes = scanner.scan(routesDir); + + expect(routes['/users']).toBeDefined(); + expect(routes['/users'].length).toBe(2); + expect(routes['/users'][0].requiresAuth).toBe(true); + expect(routes['/users'][1].inputSchema).toBe('createUserSchema'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/scanner.test.ts b/betterbase/packages/cli/test/scanner.test.ts new file mode 100644 index 0000000..edf5594 --- /dev/null +++ b/betterbase/packages/cli/test/scanner.test.ts @@ -0,0 +1,61 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { SchemaScanner } from '../src/utils/scanner'; + +describe('SchemaScanner', () => { + test('extracts tables, columns, relations, and indexes from drizzle schema', () => { + const dir = mkdtempSync(path.join(tmpdir(), 'bb-scanner-')); + + try { + const schemaPath = path.join(dir, 'schema.ts'); + writeFileSync( + schemaPath, + ` + import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core'; + + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull().unique(), + age: integer('age').default(18), + }, (table) => ({ + usersEmailIdx: index('users_email_idx').on(table.email), + })); + + export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + title: text('title').notNull(), + }); + + export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + postId: text('post_id').notNull().references(() => posts.id), + body: text('body'), + }); + `, + ); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + expect(Object.keys(tables)).toEqual(['users', 'posts', 'comments']); + + expect(tables.users.name).toBe('users'); + 
expect(tables.users.columns.id.primaryKey).toBe(true); + expect(tables.users.columns.id.nullable).toBe(false); + expect(tables.users.columns.email.unique).toBe(true); + expect(tables.users.columns.age.defaultValue).toBe('18'); + expect(tables.users.indexes).toContain('usersEmailIdx'); + + expect(tables.posts.columns.userId.references).toBe('() => users.id'); + expect(tables.posts.relations).toContain('() => users.id'); + + expect(tables.comments.columns.postId.references).toBe('() => posts.id'); + expect(tables.comments.relations).toContain('() => posts.id'); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..5c49ed4 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,42 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + + + + test('registers generate crud command', () => { + const program = createProgram(); + const generate = program.commands.find((command) => command.name() === 'generate'); + expect(generate).toBeDefined(); + + const crud = generate?.commands.find((command) => command.name() === 'crud'); + expect(crud).toBeDefined(); + }); + + test('registers auth setup command', () => { + const program = createProgram(); + const auth = program.commands.find((command) => command.name() === 'auth'); + expect(auth).toBeDefined(); + + const setup = auth?.commands.find((command) => command.name() === 'setup'); + expect(setup).toBeDefined(); + }); + + test('registers dev command', () => { + const program = createProgram(); + const dev = program.commands.find((command) => command.name() === 'dev'); + expect(dev).toBeDefined(); + }); + + test('registers migrate commands', () => { const program = createProgram(); + const migrate = program.commands.find((command) => 
command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 
'local.db'; -const sqlite = new Database(dbPath, { create: true }); + +// env.DB_PATH is always present because env schema provides a default. +const sqlite = new Database(env.DB_PATH, { create: true }); export const db = drizzle(sqlite, { schema }); diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts index 4065ba5..29fbe4b 100644 --- a/betterbase/templates/base/src/index.ts +++ b/betterbase/templates/base/src/index.ts @@ -1,4 +1,6 @@ import { Hono } from 'hono'; +import { env } from './lib/env'; +import { realtime } from './lib/realtime'; import { registerRoutes } from './routes'; const app = new Hono(); @@ -6,8 +8,19 @@ registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', + websocket: { + open(ws) { + realtime.handleConnection(ws); + }, + message(ws, message) { + realtime.handleMessage(ws, message.toString()); + }, + close(ws) { + realtime.handleClose(ws); + }, + }, }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..c61b432 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,11 @@ import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PORT: z.coerce.number().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/lib/realtime.ts b/betterbase/templates/base/src/lib/realtime.ts new file mode 100644 index 0000000..7cb3fd1 --- /dev/null +++ b/betterbase/templates/base/src/lib/realtime.ts @@ -0,0 +1,151 @@ +import type { 
ServerWebSocket } from 'bun'; + +export interface Subscription { + table: string; + filter?: Record<string, unknown>; +} + +interface Client { + ws: ServerWebSocket<unknown>; + subscriptions: Set<string>; +} + +interface RealtimeUpdatePayload { + type: 'update'; + table: string; + event: 'INSERT' | 'UPDATE' | 'DELETE'; + data: unknown; + timestamp: string; +} + +export class RealtimeServer { + private clients = new Map<ServerWebSocket<unknown>, Client>(); + private tableSubscribers = new Map<string, Set<ServerWebSocket<unknown>>>(); + + handleConnection(ws: ServerWebSocket<unknown>): void { + console.log('Client connected'); + this.clients.set(ws, { + ws, + subscriptions: new Set(), + }); + } + + handleMessage(ws: ServerWebSocket<unknown>, rawMessage: string): void { + try { + const data = JSON.parse(rawMessage) as { type?: string; table?: string; filter?: Record<string, unknown> }; + + if (!data.type || !data.table) { + ws.send(JSON.stringify({ error: 'Message must include type and table' })); + return; + } + + switch (data.type) { + case 'subscribe': + this.subscribe(ws, data.table, data.filter); + break; + case 'unsubscribe': + this.unsubscribe(ws, data.table); + break; + default: + ws.send(JSON.stringify({ error: 'Unknown message type' })); + break; + } + } catch { + ws.send(JSON.stringify({ error: 'Invalid message format' })); + } + } + + handleClose(ws: ServerWebSocket<unknown>): void { + console.log('Client disconnected'); + + const client = this.clients.get(ws); + if (client) { + for (const table of client.subscriptions) { + const subscribers = this.tableSubscribers.get(table); + subscribers?.delete(ws); + + if (subscribers && subscribers.size === 0) { + this.tableSubscribers.delete(table); + } + } + } + + this.clients.delete(ws); + } + + broadcast(table: string, event: RealtimeUpdatePayload['event'], data: unknown): void { + const subscribers = this.tableSubscribers.get(table); + if (!subscribers || subscribers.size === 0) { + return; + } + + const payload: RealtimeUpdatePayload = { + type: 'update', + table, + event, + data, + timestamp: new Date().toISOString(), + }; + + const message = 
JSON.stringify(payload); + + for (const ws of subscribers) { + try { + ws.send(message); + } catch { + this.handleClose(ws); + } + } + + console.log(`Broadcasted ${event} on ${table} to ${subscribers.size} clients`); + } + + private subscribe(ws: ServerWebSocket<unknown>, table: string, filter?: Record<string, unknown>): void { + const client = this.clients.get(ws); + if (!client) { + return; + } + + client.subscriptions.add(table); + + if (!this.tableSubscribers.has(table)) { + this.tableSubscribers.set(table, new Set()); + } + + this.tableSubscribers.get(table)?.add(ws); + + ws.send( + JSON.stringify({ + type: 'subscribed', + table, + filter, + }), + ); + + console.log(`Client subscribed to ${table}`); + } + + private unsubscribe(ws: ServerWebSocket<unknown>, table: string): void { + const client = this.clients.get(ws); + if (!client) { + return; + } + + client.subscriptions.delete(table); + const subscribers = this.tableSubscribers.get(table); + subscribers?.delete(ws); + + if (subscribers && subscribers.size === 0) { + this.tableSubscribers.delete(table); + } + + ws.send( + JSON.stringify({ + type: 'unsubscribed', + table, + }), + ); + } +} + +export const realtime = new RealtimeServer(); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || 
isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..22fff3d 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -1,6 +1,6 @@ import { Hono } from 'hono'; import { HTTPException } from 'hono/http-exception'; -import { z } from 'zod'; +import { z, ZodError } from 'zod'; import { db } from '../db'; import { users } from '../db/schema'; import { parseBody } from '../middleware/validation'; @@ -10,11 +10,69 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + try { + const pagination = paginationSchema.parse({ + limit: c.req.query('limit'), + offset: c.req.query('offset'), + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + // No DB query is run for limit=0, so hasMore cannot be determined. 
+ hasMore: null, + }, + }); + } + + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + if (error instanceof ZodError) { + return c.json( + { + error: 'Invalid pagination query parameters', + details: error.issues, + }, + 400, + ); + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..574a5e4 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,8 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationDir": "dist/types" } } From 2e57e1641dc1f03d71134629bea0b9bafbab45e7 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Thu, 19 Feb 2026 06:35:56 +0200 Subject: [PATCH 12/12] Address review findings across auth, realtime, migrate, and tooling --- betterbase/README.md | 2 + betterbase/package.json | 2 +- betterbase/packages/cli/src/build.ts | 5 +- betterbase/packages/cli/src/commands/auth.ts | 100 +++++++++-- betterbase/packages/cli/src/commands/dev.ts | 30 +++- .../packages/cli/src/commands/generate.ts | 162 +++++++----------- betterbase/packages/cli/src/commands/init.ts | 9 +- .../packages/cli/src/commands/migrate.ts | 140 +++++++-------- betterbase/packages/cli/src/constants.ts | 1 + .../cli/src/utils/context-generator.ts | 2 +- .../packages/cli/src/utils/route-scanner.ts | 2 +- betterbase/templates/base/src/index.ts | 29 ++-- betterbase/templates/base/src/lib/env.ts | 3 +- betterbase/templates/base/src/lib/realtime.ts | 93 +++++++--- 
betterbase/templates/base/src/routes/users.ts | 3 +- betterbase/tsconfig.base.json | 3 +- 16 files changed, 342 insertions(+), 244 deletions(-) diff --git a/betterbase/README.md b/betterbase/README.md index ebc5905..c1bd43f 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -38,4 +38,6 @@ From `templates/base`: - `bun run dev` - `bun run db:generate` - `bun run db:push` +- `bun run build` +- `bun run start` - `bun run typecheck` diff --git a/betterbase/package.json b/betterbase/package.json index 0f4c3d8..34bc804 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.2.14", + "packageManager": "bun@1.3.9", "workspaces": [ "apps/*", "packages/*" diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 86937ef..80fa71a 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -16,16 +16,13 @@ export async function buildStandaloneCli(): Promise { minify: false, sourcemap: 'external', naming: 'index.js', + banner: '#!/usr/bin/env bun\n', }); if (!result.success) { const diagnostics = result.logs.map((log) => (typeof log === 'string' ? 
log : JSON.stringify(log))).join('\n'); throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - - const outputPath = path.join(outdir, 'index.js'); - const compiled = await Bun.file(outputPath).text(); - await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } async function main(): Promise { diff --git a/betterbase/packages/cli/src/commands/auth.ts b/betterbase/packages/cli/src/commands/auth.ts index 3dbc76e..86c4952 100644 --- a/betterbase/packages/cli/src/commands/auth.ts +++ b/betterbase/packages/cli/src/commands/auth.ts @@ -32,16 +32,49 @@ import { users, sessions } from '../db/schema'; const authRoute = new Hono(); +const signupSchema = z.object({ + email: z.string().email(), + password: z.string().min(8), + name: z.string().min(1).optional(), +}); + const loginSchema = z.object({ email: z.string().email(), password: z.string().min(8), }); +authRoute.post('/signup', async (c) => { + const body = signupSchema.parse(await c.req.json()); + const passwordHash = await Bun.password.hash(body.password); + + const created = await db + .insert(users) + .values({ + email: body.email, + name: body.name ?? 
null, + passwordHash, + }) + .returning(); + + return c.json({ + user: { + id: created[0].id, + email: created[0].email, + name: created[0].name, + }, + }, 201); +}); + authRoute.post('/login', async (c) => { const body = loginSchema.parse(await c.req.json()); const user = await db.select().from(users).where(eq(users.email, body.email)).limit(1); - if (user.length === 0) { + if (user.length === 0 || !user[0].passwordHash) { + return c.json({ error: 'Invalid credentials' }, 401); + } + + const validPassword = await Bun.password.verify(body.password, user[0].passwordHash); + if (!validPassword) { return c.json({ error: 'Invalid credentials' }, 401); } @@ -157,6 +190,22 @@ function appendIfMissing(filePath: string, marker: string, content: string): voi writeFileSync(filePath, next); } +function ensurePasswordHashColumn(schemaPath: string): void { + const current = readFileSync(schemaPath, 'utf-8'); + if (/passwordHash\s*:\s*text\('password_hash'\)/.test(current)) { + return; + } + + const usersBlock = current.match(/export\s+const\s+users\s*=\s*sqliteTable\([^]+?\}\);/m); + if (!usersBlock) { + logger.warn('Could not find sqlite users table block; skipping passwordHash injection.'); + return; + } + + const replacement = usersBlock[0].replace(/\n\}\);$/, "\n passwordHash: text('password_hash').notNull(),\n});"); + writeFileSync(schemaPath, current.replace(usersBlock[0], replacement)); +} + function ensureAuthInConfig(projectRoot: string): void { const configPath = path.join(projectRoot, 'betterbase.config.ts'); if (!existsSync(configPath)) return; @@ -164,12 +213,30 @@ function ensureAuthInConfig(projectRoot: string): void { const current = readFileSync(configPath, 'utf-8'); if (current.includes('auth: {')) return; - const updated = current.replace( - 'export default {', - `export default {\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, - ); + const patterns = [ + { + regex: /export\s+default\s+\{/, + replace: 
`export default {\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + }, + { + regex: /export\s+default\s+defineConfig\s*\(\s*\{/, + replace: `export default defineConfig({\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + }, + ]; + + let updated = current; + for (const pattern of patterns) { + if (pattern.regex.test(updated)) { + updated = updated.replace(pattern.regex, pattern.replace); + break; + } + } - writeFileSync(configPath, updated); + if (updated !== current) { + writeFileSync(configPath, updated); + } else { + logger.warn(`Could not automatically patch auth config in ${configPath}. Please add auth config manually.`); + } } function ensureEnvVar(projectRoot: string): void { @@ -215,15 +282,24 @@ export async function runAuthSetupCommand(projectRoot: string = process.cwd()): execSync('bun add better-auth', { cwd: resolvedRoot, stdio: 'inherit' }); logger.info('šŸ“ Adding auth tables to schema...'); + ensurePasswordHashColumn(schemaPath); appendIfMissing(schemaPath, "export const sessions = sqliteTable('sessions'", AUTH_SCHEMA_BLOCK); logger.info('šŸ›”ļø Creating auth middleware...'); mkdirSync(path.dirname(middlewarePath), { recursive: true }); - writeFileSync(middlewarePath, AUTH_MIDDLEWARE_FILE); + if (!existsSync(middlewarePath)) { + writeFileSync(middlewarePath, AUTH_MIDDLEWARE_FILE); + } else { + logger.warn(`Skipping existing middleware file: ${middlewarePath}`); + } logger.info('🧭 Creating auth routes...'); mkdirSync(path.dirname(routePath), { recursive: true }); - writeFileSync(routePath, AUTH_ROUTE_FILE); + if (!existsSync(routePath)) { + writeFileSync(routePath, AUTH_ROUTE_FILE); + } else { + logger.warn(`Skipping existing route file: ${routePath}`); + } ensureRoutesIndexHook(resolvedRoot); logger.info('āš™ļø Updating config...'); @@ -231,8 +307,8 @@ export async function runAuthSetupCommand(projectRoot: string = process.cwd()): 
ensureEnvVar(resolvedRoot); logger.success('Authentication setup complete!'); - console.log('\nNext steps:'); - console.log(' 1. Set AUTH_SECRET in .env'); - console.log(' 2. Run: bun run db:push'); - console.log(' 3. Use requireAuth middleware on protected routes'); + logger.info('Next steps:'); + logger.info('1. Set AUTH_SECRET in .env'); + logger.info('2. Run: bun run db:push'); + logger.info('3. Use requireAuth middleware on protected routes'); } diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts index 3720944..762a021 100644 --- a/betterbase/packages/cli/src/commands/dev.ts +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -1,15 +1,16 @@ -import { existsSync, watch } from 'node:fs'; +import { existsSync, watch, type FSWatcher } from 'node:fs'; import path from 'node:path'; import { ContextGenerator } from '../utils/context-generator'; import * as logger from '../utils/logger'; -export async function runDevCommand(projectRoot: string = process.cwd()): Promise<void> { +export async function runDevCommand(projectRoot: string = process.cwd()): Promise<() => void> { const generator = new ContextGenerator(); await generator.generate(projectRoot); const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; const timers = new Map<string, ReturnType<typeof setTimeout>>(); + const watchers: FSWatcher[] = []; for (const watchPath of watchPaths) { if (!existsSync(watchPath)) { @@ -18,8 +19,8 @@ } try { - watch(watchPath, { recursive: true }, (_eventType, filename) => { - console.log(`šŸ“ File changed: ${String(filename ?? '')}`); + const watcher = watch(watchPath, { recursive: true }, (_eventType, filename) => { + logger.info(`File changed: ${String(filename ?? 
'')}`); const existing = timers.get(watchPath); if (existing) { @@ -27,25 +28,38 @@ export async function runDevCommand(projectRoot: string = process.cwd()): Promis } const timer = setTimeout(async () => { - console.log('šŸ”„ Regenerating context...'); + logger.info('Regenerating context...'); const start = Date.now(); try { await generator.generate(projectRoot); - console.log(`āœ… Context updated in ${Date.now() - start}ms`); + logger.info(`Context updated in ${Date.now() - start}ms`); } catch (error) { const message = error instanceof Error ? error.message : String(error); - console.error(`āŒ Failed to regenerate context: ${message}`); + logger.error(`Failed to regenerate context: ${message}`); } }, 250); timers.set(watchPath, timer); }); + + watchers.push(watcher); } catch (error) { const message = error instanceof Error ? error.message : String(error); logger.warn(`Failed to watch path ${watchPath}: ${message}`); } } - console.log('šŸ‘€ Watching for schema and route changes...'); + logger.info('Watching for schema and route changes...'); + + return () => { + for (const timer of timers.values()) { + clearTimeout(timer); + } + timers.clear(); + + for (const watcher of watchers) { + watcher.close(); + } + }; } diff --git a/betterbase/packages/cli/src/commands/generate.ts b/betterbase/packages/cli/src/commands/generate.ts index 3035c08..1735e30 100644 --- a/betterbase/packages/cli/src/commands/generate.ts +++ b/betterbase/packages/cli/src/commands/generate.ts @@ -1,109 +1,51 @@ -import { execSync } from 'node:child_process'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; import path from 'node:path'; import { SchemaScanner, type TableInfo } from '../utils/schema-scanner'; import * as logger from '../utils/logger'; - -const REALTIME_FILE = `import type { ServerWebSocket } from 'bun'; - -interface Client { - ws: ServerWebSocket; - subscriptions: Set; -} - -class RealtimeServer { - private clients = new Map, Client>(); - private 
tableSubscribers = new Map>>(); - - handleConnection(ws: ServerWebSocket): void { - this.clients.set(ws, { ws, subscriptions: new Set() }); +function toSingular(name: string): string { + const lower = name.toLowerCase(); + const invariants = new Set(['status', 'news', 'series']); + if (invariants.has(lower)) { + return name; } - handleMessage(ws: ServerWebSocket, rawMessage: string): void { - try { - const data = JSON.parse(rawMessage) as { type?: string; table?: string }; - if (!data.type || !data.table) return; - - if (data.type === 'subscribe') { - if (!this.tableSubscribers.has(data.table)) this.tableSubscribers.set(data.table, new Set()); - this.tableSubscribers.get(data.table)?.add(ws); - } - - if (data.type === 'unsubscribe') { - this.tableSubscribers.get(data.table)?.delete(ws); - } - } catch { - // ignore malformed payloads - } + if (/men$/i.test(name)) { + return name.replace(/men$/i, 'man'); } - handleClose(ws: ServerWebSocket): void { - this.clients.delete(ws); - for (const subscribers of this.tableSubscribers.values()) { - subscribers.delete(ws); - } + if (/ies$/i.test(name)) { + return name.replace(/ies$/i, 'y'); } - broadcast(table: string, event: 'INSERT' | 'UPDATE' | 'DELETE', data: unknown): void { - const subscribers = this.tableSubscribers.get(table); - if (!subscribers) return; - - const payload = JSON.stringify({ type: 'update', table, event, data, timestamp: new Date().toISOString() }); - for (const ws of subscribers) { - ws.send(payload); - } + if (/(ses|xes|zes|ches|shes)$/i.test(name)) { + return name.replace(/es$/i, ''); } -} - -export const realtime = new RealtimeServer(); -`; -function ensureRealtimeUtility(projectRoot: string): void { - const realtimePath = path.join(projectRoot, 'src/lib/realtime.ts'); - if (existsSync(realtimePath)) { - return; + if (name.endsWith('s') && !name.endsWith('ss')) { + return name.slice(0, -1); } - mkdirSync(path.dirname(realtimePath), { recursive: true }); - writeFileSync(realtimePath, REALTIME_FILE); -} 
- - -function toSingular(name: string): string { - return name.endsWith('s') ? name.slice(0, -1) : `${name}Item`; + return `${name}Item`; } function schemaTypeToZod(type: string): string { - if (type === 'integer' || type === 'number') { - return 'z.coerce.number()'; - } - - if (type === 'boolean') { - return 'z.coerce.boolean()'; - } - - if (type === 'json') { - return 'z.unknown()'; - } - - if (type === 'datetime') { - return 'z.coerce.date()'; - } - + if (type === 'integer' || type === 'number') return 'z.coerce.number()'; + if (type === 'boolean') return 'z.coerce.boolean()'; + if (type === 'json') return 'z.unknown()'; + if (type === 'datetime') return 'z.coerce.date()'; return 'z.string()'; } function buildSchemaShape(table: TableInfo, mode: 'create' | 'update'): string { - const entries = Object.entries(table.columns) + return Object.entries(table.columns) .filter(([columnName, column]) => !(column.primaryKey || columnName === 'id')) .map(([columnName, column]) => { const base = schemaTypeToZod(column.type); const optional = mode === 'update' || column.nullable || Boolean(column.defaultValue); return ` ${columnName}: ${optional ? 
`${base}.optional()` : base}`; - }); - - return entries.join(',\n'); + }) + .join(',\n'); } function generateRouteFile(tableName: string, table: TableInfo): string { @@ -138,9 +80,7 @@ ${tableName}Route.get('/', async (c) => { const queryParams = c.req.query(); const sort = queryParams.sort; - const filters = Object.entries(queryParams).filter(([key, value]) => { - return key !== 'limit' && key !== 'offset' && key !== 'sort' && value !== undefined; - }); + const filters = Object.entries(queryParams).filter(([key, value]) => key !== 'limit' && key !== 'offset' && key !== 'sort' && value !== undefined); let query = db.select().from(${tableName}).$dynamic(); @@ -224,11 +164,7 @@ function updateMainRouter(projectRoot: string, tableName: string): void { if (!router.includes(importLine)) { const firstRouteImport = /import\s+\{\s*healthRoute\s*\}\s+from\s+'\.\/health';/; - if (firstRouteImport.test(router)) { - router = router.replace(firstRouteImport, (m) => `${m}\n${importLine}`); - } else { - router = `${importLine}\n${router}`; - } + router = firstRouteImport.test(router) ? 
router.replace(firstRouteImport, (m) => `${m}\n${importLine}`) : `${importLine}\n${router}`; } if (!router.includes(routeLine)) { @@ -245,6 +181,40 @@ function updateMainRouter(projectRoot: string, tableName: string): void { writeFileSync(routerPath, router); } +function ensureRealtimeUtility(projectRoot: string): void { + const realtimePath = path.join(projectRoot, 'src/lib/realtime.ts'); + if (existsSync(realtimePath)) return; + + const canonicalRealtimePath = path.resolve(import.meta.dir, '../../../templates/base/src/lib/realtime.ts'); + if (!existsSync(canonicalRealtimePath)) { + throw new Error(`Canonical realtime template not found at ${canonicalRealtimePath}`); + } + + mkdirSync(path.dirname(realtimePath), { recursive: true }); + writeFileSync(realtimePath, readFileSync(canonicalRealtimePath, 'utf-8')); +} + +async function ensureZodValidatorInstalled(projectRoot: string): Promise<void> { + logger.info('Installing @hono/zod-validator...'); + const process = Bun.spawn(['bun', 'add', '@hono/zod-validator'], { + cwd: projectRoot, + stdout: 'pipe', + stderr: 'pipe', + }); + + const [exitCode, stdout, stderr] = await Promise.all([ + process.exited, + new Response(process.stdout).text(), + new Response(process.stderr).text(), + ]); + + if (exitCode !== 0) { + if (stdout.trim()) logger.warn(stdout.trim()); + if (stderr.trim()) logger.error(stderr.trim()); + throw new Error('Failed to install @hono/zod-validator.'); + } +} + export async function runGenerateCrudCommand(projectRoot: string, tableName: string): Promise<void> { const resolvedRoot = path.resolve(projectRoot); const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); @@ -253,19 +223,16 @@ throw new Error(`Schema file not found at ${schemaPath}`); } - logger.info(`šŸ”Ø Generating CRUD for ${tableName}...`); + logger.info(`Generating CRUD for ${tableName}...`); const scanner = new SchemaScanner(schemaPath); const tables = 
scanner.scan(); - const table = tables[tableName]; if (!table) { throw new Error(`Table "${tableName}" not found in schema.`); } - logger.info('šŸ“¦ Installing @hono/zod-validator...'); - execSync('bun add @hono/zod-validator', { cwd: resolvedRoot, stdio: 'inherit' }); - + await ensureZodValidatorInstalled(resolvedRoot); ensureRealtimeUtility(resolvedRoot); const routesDir = path.join(resolvedRoot, 'src/routes'); @@ -277,10 +244,9 @@ export async function runGenerateCrudCommand(projectRoot: string, tableName: str updateMainRouter(resolvedRoot, tableName); logger.success(`Generated ${routePath}`); - console.log('\nEndpoints created:'); - console.log(` GET /api/${tableName}`); - console.log(` GET /api/${tableName}/:id`); - console.log(` POST /api/${tableName}`); - console.log(` PATCH /api/${tableName}/:id`); - console.log(` DELETE /api/${tableName}/:id`); + logger.info(`GET /api/${tableName}`); + logger.info(`GET /api/${tableName}/:id`); + logger.info(`POST /api/${tableName}`); + logger.info(`PATCH /api/${tableName}/:id`); + logger.info(`DELETE /api/${tableName}/:id`); } diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index abe5481..0e7c6ac 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -380,8 +380,12 @@ async function writeProjectFiles( path.join(projectPath, 'src/lib/env.ts'), `import { z } from 'zod'; +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), + PORT: z.coerce.number().int().positive().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); @@ -609,6 +613,7 @@ usersRoute.post('/', async (c) => { await writeFile( path.join(projectPath, 'src/index.ts'), `import { Hono } from 'hono'; +import { env } from './lib/env'; import { registerRoutes } from './routes'; const app = 
new Hono();
@@ -616,8 +621,8 @@ registerRoutes(app);
 
 const server = Bun.serve({
   fetch: app.fetch,
-  port: Number(process.env.PORT ?? 3000),
-  development: process.env.NODE_ENV === 'development',
+  port: env.PORT,
+  development: env.NODE_ENV === 'development',
 });
 
 console.log(\`šŸš€ Server running at http://localhost:\${server.port}\`);
diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts
index 8999867..af22934 100644
--- a/betterbase/packages/cli/src/commands/migrate.ts
+++ b/betterbase/packages/cli/src/commands/migrate.ts
@@ -14,12 +14,7 @@ const migrateOptionsSchema = z.object({
 
 export type MigrateCommandOptions = z.infer<typeof migrateOptionsSchema>;
 
-export type MigrationChangeType =
-  | 'create_table'
-  | 'add_column'
-  | 'modify_column'
-  | 'drop_column'
-  | 'drop_table';
+export type MigrationChangeType = 'create_table' | 'add_column' | 'modify_column' | 'drop_column' | 'drop_table';
 
 export interface MigrationChange {
   type: MigrationChangeType;
@@ -41,25 +36,40 @@ interface MigrationBackup {
 }
 
 const DRIZZLE_DIR = 'drizzle';
+const DRIZZLE_TIMEOUT_MS = 30_000;
+
+function captureIdentifier(match: RegExpMatchArray, startIndex: number): string {
+  return match[startIndex] ?? match[startIndex + 1] ?? match[startIndex + 2] ?? 
''; +} async function runDrizzleKit(args: string[]): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), DRIZZLE_TIMEOUT_MS); + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { cwd: process.cwd(), stdout: 'pipe', stderr: 'pipe', + signal: controller.signal, }); - const [stdout, stderr, exitCode] = await Promise.all([ - new Response(proc.stdout).text(), - new Response(proc.stderr).text(), - proc.exited, - ]); + try { + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); - return { - success: exitCode === 0, - stdout, - stderr, - }; + return { success: exitCode === 0, stdout, stderr }; + } catch { + return { + success: false, + stdout: '', + stderr: `drizzle-kit ${args.join(' ')} timed out after ${DRIZZLE_TIMEOUT_MS / 1000}s`, + }; + } finally { + clearTimeout(timeout); + } } async function listSqlFiles(baseDir: string): Promise> { @@ -95,43 +105,46 @@ async function listSqlFiles(baseDir: string): Promise> { function analyzeMigration(sqlStatements: string[]): MigrationChange[] { const changes: MigrationChange[] = []; + const ident = '(?:"([^"]+)"|`([^`]+)`|([\\w.-]+))'; for (const statement of sqlStatements) { const sql = statement.trim(); - if (!sql) { - continue; - } + if (!sql) continue; - const createTable = sql.match(/create\s+table\s+"?([\w.-]+)"?/i); + const createTable = sql.match(new RegExp(`create\\s+table(?:\\s+if\\s+not\\s+exists)?\\s+${ident}`, 'i')); if (createTable) { - changes.push({ type: 'create_table', table: createTable[1], isDestructive: false, detail: sql }); + changes.push({ type: 'create_table', table: captureIdentifier(createTable, 1), isDestructive: false, detail: sql }); continue; } - const dropTable = sql.match(/drop\s+table\s+"?([\w.-]+)"?/i); + const dropTable = sql.match(new RegExp(`drop\\s+table(?:\\s+if\\s+exists)?\\s+${ident}`, 'i')); if (dropTable) { - changes.push({ type: 
'drop_table', table: dropTable[1], isDestructive: true, detail: sql }); + changes.push({ type: 'drop_table', table: captureIdentifier(dropTable, 1), isDestructive: true, detail: sql }); continue; } - const addColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+add\s+column\s+"?([\w.-]+)"?/i); + const addColumn = sql.match( + new RegExp(`alter\\s+table(?:\\s+if\\s+exists)?\\s+${ident}\\s+add\\s+column(?:\\s+if\\s+not\\s+exists)?\\s+${ident}`, 'i'), + ); if (addColumn) { changes.push({ type: 'add_column', - table: addColumn[1], - column: addColumn[2], + table: captureIdentifier(addColumn, 1), + column: captureIdentifier(addColumn, 4), isDestructive: false, detail: sql, }); continue; } - const dropColumn = sql.match(/alter\s+table\s+"?([\w.-]+)"?\s+drop\s+column\s+"?([\w.-]+)"?/i); + const dropColumn = sql.match( + new RegExp(`alter\\s+table(?:\\s+if\\s+exists)?\\s+${ident}\\s+drop\\s+column(?:\\s+if\\s+exists)?\\s+${ident}`, 'i'), + ); if (dropColumn) { changes.push({ type: 'drop_column', - table: dropColumn[1], - column: dropColumn[2], + table: captureIdentifier(dropColumn, 1), + column: captureIdentifier(dropColumn, 4), isDestructive: true, detail: sql, }); @@ -139,17 +152,19 @@ function analyzeMigration(sqlStatements: string[]): MigrationChange[] { } const alterColumn = sql.match( - /alter\s+table\s+"?([\w.-]+)"?\s+(alter\s+column\s+"?([\w.-]+)"?|rename\s+column\s+"?([\w.-]+)"?)/i, + new RegExp( + `alter\\s+table(?:\\s+if\\s+exists)?\\s+${ident}\\s+(?:alter\\s+column\\s+${ident}|rename\\s+column\\s+${ident})`, + 'i', + ), ); if (alterColumn) { changes.push({ type: 'modify_column', - table: alterColumn[1], - column: alterColumn[3] ?? 
alterColumn[4], + table: captureIdentifier(alterColumn, 1), + column: captureIdentifier(alterColumn, 4) || captureIdentifier(alterColumn, 7), isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), detail: sql, }); - continue; } } @@ -169,50 +184,40 @@ function displayDiff(changes: MigrationChange[]): void { const modified = changes.filter((c) => c.type === 'modify_column'); const destructive = changes.filter((c) => c.isDestructive); - if (newTables.length > 0) { + if (newTables.length) { console.log(chalk.green('āœ… New Tables:')); - for (const change of newTables) { - console.log(chalk.green(` + ${change.table}`)); - } + newTables.forEach((change) => console.log(chalk.green(` + ${change.table}`))); console.log(''); } - if (newColumns.length > 0) { + if (newColumns.length) { console.log(chalk.green('āœ… New Columns:')); - for (const change of newColumns) { - console.log(chalk.green(` + ${change.table}.${change.column ?? ''}`)); - } + newColumns.forEach((change) => console.log(chalk.green(` + ${change.table}.${change.column ?? ''}`))); console.log(''); } - if (modified.length > 0) { + if (modified.length) { console.log(chalk.yellow('āš ļø Modified Columns:')); - for (const change of modified) { - console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`)); - } + modified.forEach((change) => console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`))); console.log(''); } - if (destructive.length > 0) { + if (destructive.length) { console.log(chalk.red('āŒ Destructive Changes:')); - for (const change of destructive) { + destructive.forEach((change) => { console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? 
`.${change.column}` : ''}`)); console.log(chalk.red(' āš ļø This will DELETE DATA')); - } + }); console.log(''); } } async function confirmDestructive(changes: MigrationChange[]): Promise { const destructive = changes.filter((c) => c.isDestructive); - if (destructive.length === 0) { - return true; - } + if (destructive.length === 0) return true; logger.warn('DESTRUCTIVE CHANGES DETECTED:'); - for (const change of destructive) { - console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`); - } + destructive.forEach((change) => console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); if (confirmation !== 'delete data') { @@ -241,8 +246,7 @@ async function backupDatabase(): Promise { const db = new Database(sourcePath, { readonly: true }); try { - const snapshot = db.serialize(); - await Bun.write(backupPath, snapshot); + await Bun.write(backupPath, db.serialize()); } finally { db.close(); } @@ -252,13 +256,9 @@ async function backupDatabase(): Promise { } async function restoreBackup(backup: MigrationBackup | null): Promise { - if (backup === null) { - return; - } - + if (backup === null) return; const bytes = await Bun.file(backup.backupPath).bytes(); await Bun.write(backup.sourcePath, bytes); - logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); } @@ -286,10 +286,10 @@ async function collectChangesFromGenerate(): Promise { for (const [relativePath, content] of after.entries()) { const previous = before.get(relativePath); - if (previous === content) { - continue; - } + if (previous === content) continue; + // Intentionally analyze full changed file content: drizzle-kit typically creates new migration files, + // so whole-file analysis is simpler and reliable. If in-place edits become common, switch to a true diff. 
changedSql.push(...splitStatements(content)); } @@ -309,11 +309,7 @@ export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Prom } if (options.production) { - const proceed = await prompts.confirm({ - message: 'Apply migrations to production now?', - initial: false, - }); - + const proceed = await prompts.confirm({ message: 'Apply migrations to production now?', initial: false }); if (!proceed) { logger.warn('Migration cancelled by user.'); return; @@ -321,14 +317,10 @@ export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Prom } let backup: MigrationBackup | null = null; - if (changes.some((change) => change.isDestructive)) { backup = await backupDatabase(); - const confirmed = await confirmDestructive(changes); - if (!confirmed) { - return; - } + if (!confirmed) return; } logger.info('Applying migrations with drizzle-kit push...'); diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts index 8daff7f..4190ff6 100644 --- a/betterbase/packages/cli/src/constants.ts +++ b/betterbase/packages/cli/src/constants.ts @@ -1 +1,2 @@ +// Keep in sync with templates/base/src/lib/env.ts DEFAULT_DB_PATH. export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts index aabf0f4..794c8ee 100644 --- a/betterbase/packages/cli/src/utils/context-generator.ts +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -74,7 +74,7 @@ export class ContextGenerator { } prompt += '\nWhen writing code for this project:\n'; - prompt += "1. Always import tables from './src/db/schema'\n"; + prompt += "1. Always import tables from '../db/schema'\n"; prompt += '2. Use Drizzle ORM for database queries\n'; prompt += '3. Validate inputs with Zod\n'; prompt += '4. 
Return JSON responses with proper status codes\n';
diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts
index 5fc7aa9..d5cd2c9 100644
--- a/betterbase/packages/cli/src/utils/route-scanner.ts
+++ b/betterbase/packages/cli/src/utils/route-scanner.ts
@@ -145,7 +145,7 @@ export class RouteScanner {
     return routes;
   }
 
-  private findSchemaUsage(sourceFile: ts.SourceFile, args: ts.NodeArray<ts.Expression>, mode: 'input' | 'output'): string | undefined {
+  private findSchemaUsage(sourceFile: ts.SourceFile, args: readonly ts.Expression[], mode: 'input' | 'output'): string | undefined {
     const text = args.map((arg) => arg.getText(sourceFile)).join('\n');
 
     if (mode === 'input') {
diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts
index 29fbe4b..b8f9534 100644
--- a/betterbase/templates/base/src/index.ts
+++ b/betterbase/templates/base/src/index.ts
@@ -1,26 +1,33 @@
 import { Hono } from 'hono';
+import { upgradeWebSocket } from 'hono/bun';
 import { env } from './lib/env';
 import { realtime } from './lib/realtime';
 import { registerRoutes } from './routes';
 
 const app = new Hono();
+
+app.get(
+  '/ws',
+  upgradeWebSocket(() => ({
+    onOpen(_event, ws) {
+      realtime.handleConnection(ws.raw);
+    },
+    onMessage(event, ws) {
+      const message = typeof event.data === 'string' ? 
event.data : event.data.toString(); + realtime.handleMessage(ws.raw, message); + }, + onClose(_event, ws) { + realtime.handleClose(ws.raw); + }, + })), +); + registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, port: env.PORT, development: env.NODE_ENV === 'development', - websocket: { - open(ws) { - realtime.handleConnection(ws); - }, - message(ws, message) { - realtime.handleMessage(ws, message.toString()); - }, - close(ws) { - realtime.handleClose(ws); - }, - }, }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c61b432..9967db2 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,10 +1,11 @@ import { z } from 'zod'; +// Keep in sync with packages/cli/src/constants.ts DEFAULT_DB_PATH. export const DEFAULT_DB_PATH = 'local.db'; const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), - PORT: z.coerce.number().default(3000), + PORT: z.coerce.number().int().positive().default(3000), DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); diff --git a/betterbase/templates/base/src/lib/realtime.ts b/betterbase/templates/base/src/lib/realtime.ts index 7cb3fd1..ace9c71 100644 --- a/betterbase/templates/base/src/lib/realtime.ts +++ b/betterbase/templates/base/src/lib/realtime.ts @@ -7,7 +7,7 @@ export interface Subscription { interface Client { ws: ServerWebSocket; - subscriptions: Set; + subscriptions: Map; } interface RealtimeUpdatePayload { @@ -18,15 +18,21 @@ interface RealtimeUpdatePayload { timestamp: string; } +const realtimeLogger = { + debug: (message: string): void => console.debug(`[realtime] ${message}`), + info: (message: string): void => console.info(`[realtime] ${message}`), + warn: (message: string): void => console.warn(`[realtime] ${message}`), +}; + export class RealtimeServer { private clients = new Map, 
Client>(); private tableSubscribers = new Map>>(); handleConnection(ws: ServerWebSocket): void { - console.log('Client connected'); + realtimeLogger.info('Client connected'); this.clients.set(ws, { ws, - subscriptions: new Set(), + subscriptions: new Map(), }); } @@ -35,7 +41,7 @@ export class RealtimeServer { const data = JSON.parse(rawMessage) as { type?: string; table?: string; filter?: Record }; if (!data.type || !data.table) { - ws.send(JSON.stringify({ error: 'Message must include type and table' })); + this.safeSend(ws, { error: 'Message must include type and table' }); return; } @@ -47,20 +53,20 @@ export class RealtimeServer { this.unsubscribe(ws, data.table); break; default: - ws.send(JSON.stringify({ error: 'Unknown message type' })); + this.safeSend(ws, { error: 'Unknown message type' }); break; } } catch { - ws.send(JSON.stringify({ error: 'Invalid message format' })); + this.safeSend(ws, { error: 'Invalid message format' }); } } handleClose(ws: ServerWebSocket): void { - console.log('Client disconnected'); + realtimeLogger.info('Client disconnected'); const client = this.clients.get(ws); if (client) { - for (const table of client.subscriptions) { + for (const table of client.subscriptions.keys()) { const subscribers = this.tableSubscribers.get(table); subscribers?.delete(ws); @@ -79,6 +85,8 @@ export class RealtimeServer { return; } + const initialCount = subscribers.size; + const payload: RealtimeUpdatePayload = { type: 'update', table, @@ -89,15 +97,20 @@ export class RealtimeServer { const message = JSON.stringify(payload); - for (const ws of subscribers) { - try { - ws.send(message); - } catch { + for (const ws of [...subscribers]) { + const client = this.clients.get(ws); + const subscription = client?.subscriptions.get(table); + if (!this.matchesFilter(subscription?.filter, data)) { + continue; + } + + if (!this.safeSend(ws, message)) { + subscribers.delete(ws); this.handleClose(ws); } } - console.log(`Broadcasted ${event} on ${table} to 
${subscribers.size} clients`); + realtimeLogger.debug(`Broadcasted ${event} on ${table} to ${initialCount} clients`); } private subscribe(ws: ServerWebSocket, table: string, filter?: Record): void { @@ -106,7 +119,7 @@ export class RealtimeServer { return; } - client.subscriptions.add(table); + client.subscriptions.set(table, { table, filter }); if (!this.tableSubscribers.has(table)) { this.tableSubscribers.set(table, new Set()); @@ -114,15 +127,13 @@ export class RealtimeServer { this.tableSubscribers.get(table)?.add(ws); - ws.send( - JSON.stringify({ - type: 'subscribed', - table, - filter, - }), - ); + this.safeSend(ws, { + type: 'subscribed', + table, + filter, + }); - console.log(`Client subscribed to ${table}`); + realtimeLogger.debug(`Client subscribed to ${table}`); } private unsubscribe(ws: ServerWebSocket, table: string): void { @@ -139,12 +150,38 @@ export class RealtimeServer { this.tableSubscribers.delete(table); } - ws.send( - JSON.stringify({ - type: 'unsubscribed', - table, - }), - ); + this.safeSend(ws, { + type: 'unsubscribed', + table, + }); + } + + private matchesFilter(filter: Record | undefined, payload: unknown): boolean { + if (!filter || Object.keys(filter).length === 0) { + return true; + } + + if (!payload || typeof payload !== 'object') { + return false; + } + + const data = payload as Record; + return Object.entries(filter).every(([key, value]) => data[key] === value); + } + + private safeSend(ws: ServerWebSocket, payload: object | string): boolean { + if (ws.readyState !== WebSocket.OPEN) { + return false; + } + + try { + ws.send(typeof payload === 'string' ? payload : JSON.stringify(payload)); + return true; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + realtimeLogger.warn(`WebSocket send failed: ${message}`); + return false; + } } } diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 22fff3d..7d9504c 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -1,3 +1,4 @@ +import { asc } from 'drizzle-orm'; import { Hono } from 'hono'; import { HTTPException } from 'hono/http-exception'; import { z, ZodError } from 'zod'; @@ -43,7 +44,7 @@ usersRoute.get('/', async (c) => { }); } - const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const rows = await db.select().from(users).orderBy(asc(users.id)).limit(limit + 1).offset(offset); const hasMore = rows.length > limit; const paginatedUsers = rows.slice(0, limit); diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index 574a5e4..e86a10c 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -9,7 +9,6 @@ "resolveJsonModule": true, "isolatedModules": true, "forceConsistentCasingInFileNames": true, - "declaration": true, - "declarationDir": "dist/types" + "declaration": true } }