diff --git a/betterbase/.gitignore b/betterbase/.gitignore index 51c8bb1..240b1f4 100644 --- a/betterbase/.gitignore +++ b/betterbase/.gitignore @@ -3,14 +3,17 @@ node_modules .turbo dist .next +*.sqlite3 +*.sqlite +*.db +*.tsbuildinfo +.betterbase-context.json .vscode/ .idea/ .env .env.* -.env.local -.env.test !.env.example *.log diff --git a/betterbase/README.md b/betterbase/README.md index 46bf456..c1bd43f 100644 --- a/betterbase/README.md +++ b/betterbase/README.md @@ -19,6 +19,18 @@ Initial BetterBase monorepo scaffold with a concrete base template. - Workspace orchestration: **Turborepo** - Language: **TypeScript** + +## Monorepo Commands + +From the monorepo root: + +- `bun install` +- `bun run dev` +- `bun run build` +- `bun run typecheck` (runs `turbo run typecheck --filter '*'`) + +> Note: `templates/base` is not in the root workspace graph (`apps/*`, `packages/*`), so run template checks separately (e.g. `cd templates/base && bun run typecheck`). + ## Base Template Commands From `templates/base`: @@ -26,4 +38,6 @@ From `templates/base`: - `bun run dev` - `bun run db:generate` - `bun run db:push` +- `bun run build` +- `bun run start` - `bun run typecheck` diff --git a/betterbase/apps/cli/tsconfig.json b/betterbase/apps/cli/tsconfig.json index 4031161..cd17ee5 100644 --- a/betterbase/apps/cli/tsconfig.json +++ b/betterbase/apps/cli/tsconfig.json @@ -3,7 +3,14 @@ "compilerOptions": { "outDir": "dist", "rootDir": ".", - "types": ["bun"] + "types": [ + "bun" + ] }, - "include": ["src", "test"] + "include": [ + "src/**/*.ts", + "src/**/*.tsx", + "test/**/*.ts", + "test/**/*.tsx" + ] } diff --git a/betterbase/package.json b/betterbase/package.json index eaa5a17..34bc804 100644 --- a/betterbase/package.json +++ b/betterbase/package.json @@ -1,7 +1,7 @@ { "name": "betterbase", "private": true, - "packageManager": "bun@1.1.38", + "packageManager": "bun@1.3.9", "workspaces": [ "apps/*", "packages/*" @@ -10,7 +10,7 @@ "build": "turbo run build", "dev": "turbo run 
dev --parallel", "lint": "turbo run lint", - "typecheck": "turbo run typecheck" + "typecheck": "turbo run typecheck --filter '*'" }, "devDependencies": { "turbo": "^2.0.0", diff --git a/betterbase/packages/cli/package.json b/betterbase/packages/cli/package.json index 5b8543d..54f6fb8 100644 --- a/betterbase/packages/cli/package.json +++ b/betterbase/packages/cli/package.json @@ -16,11 +16,11 @@ "chalk": "^5.3.0", "commander": "^12.1.0", "inquirer": "^10.2.2", - "zod": "^3.23.8" + "zod": "^3.23.8", + "typescript": "^5.3.0" }, "devDependencies": { - "@types/bun": "^1.3.9", - "typescript": "^5.9.3" + "@types/bun": "^1.3.9" }, "exports": { ".": "./src/index.ts" diff --git a/betterbase/packages/cli/src/build.ts b/betterbase/packages/cli/src/build.ts index 198205e..80fa71a 100644 --- a/betterbase/packages/cli/src/build.ts +++ b/betterbase/packages/cli/src/build.ts @@ -1,36 +1,35 @@ +import path from 'node:path'; + /** * Build the CLI as a standalone bundled executable output. */ export async function buildStandaloneCli(): Promise { + const moduleDir = import.meta.dir; + const entrypoint = path.resolve(moduleDir, 'index.ts'); + const outdir = path.resolve(moduleDir, '../dist'); + const result = await Bun.build({ - entrypoints: ['./src/index.ts'], - outdir: './dist', + entrypoints: [entrypoint], + outdir, target: 'bun', format: 'esm', minify: false, sourcemap: 'external', naming: 'index.js', + banner: '#!/usr/bin/env bun\n', }); if (!result.success) { const diagnostics = result.logs.map((log) => (typeof log === 'string' ? 
log : JSON.stringify(log))).join('\n'); throw new Error(`Build failed with ${result.logs.length} error(s).\n${diagnostics}`); } - - const outputPath = './dist/index.js'; - const compiled = await Bun.file(outputPath).text(); - await Bun.write(outputPath, `#!/usr/bin/env bun\n${compiled}`); } async function main(): Promise { await buildStandaloneCli(); } -const isEsmMain = typeof import.meta !== 'undefined' && import.meta.main; -const cjs = globalThis as unknown as { require?: { main?: unknown }; module?: unknown }; -const isCjsMain = cjs.require?.main !== undefined && cjs.require.main === cjs.module; - -if (isEsmMain || isCjsMain) { +if (import.meta.main) { main().catch((error) => { console.error('Build failed:', error); process.exit(1); diff --git a/betterbase/packages/cli/src/commands/auth.ts b/betterbase/packages/cli/src/commands/auth.ts new file mode 100644 index 0000000..86c4952 --- /dev/null +++ b/betterbase/packages/cli/src/commands/auth.ts @@ -0,0 +1,314 @@ +import { execSync } from 'node:child_process'; +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import * as logger from '../utils/logger'; + +const AUTH_SCHEMA_BLOCK = ` +// Auth tables (generated by BetterAuth) +export const sessions = sqliteTable('sessions', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + expiresAt: integer('expires_at', { mode: 'timestamp' }).notNull(), + ipAddress: text('ip_address'), + userAgent: text('user_agent'), +}); + +export const accounts = sqliteTable('accounts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + provider: text('provider').notNull(), + providerAccountId: text('provider_account_id').notNull(), + accessToken: text('access_token'), + refreshToken: text('refresh_token'), + expiresAt: integer('expires_at', { mode: 'timestamp' }), +}); +`; + +const AUTH_ROUTE_FILE = `import { Hono } from 'hono'; +import { 
z } from 'zod'; +import { eq } from 'drizzle-orm'; +import { db } from '../db'; +import { users, sessions } from '../db/schema'; + +const authRoute = new Hono(); + +const signupSchema = z.object({ + email: z.string().email(), + password: z.string().min(8), + name: z.string().min(1).optional(), +}); + +const loginSchema = z.object({ + email: z.string().email(), + password: z.string().min(8), +}); + +authRoute.post('/signup', async (c) => { + const body = signupSchema.parse(await c.req.json()); + const passwordHash = await Bun.password.hash(body.password); + + const created = await db + .insert(users) + .values({ + email: body.email, + name: body.name ?? null, + passwordHash, + }) + .returning(); + + return c.json({ + user: { + id: created[0].id, + email: created[0].email, + name: created[0].name, + }, + }, 201); +}); + +authRoute.post('/login', async (c) => { + const body = loginSchema.parse(await c.req.json()); + + const user = await db.select().from(users).where(eq(users.email, body.email)).limit(1); + if (user.length === 0 || !user[0].passwordHash) { + return c.json({ error: 'Invalid credentials' }, 401); + } + + const validPassword = await Bun.password.verify(body.password, user[0].passwordHash); + if (!validPassword) { + return c.json({ error: 'Invalid credentials' }, 401); + } + + const sessionId = crypto.randomUUID(); + const expiresAt = new Date(Date.now() + 30 * 24 * 60 * 60 * 1000); + + await db.insert(sessions).values({ + id: sessionId, + userId: user[0].id, + expiresAt, + ipAddress: c.req.header('cf-connecting-ip') || c.req.header('x-forwarded-for') || null, + userAgent: c.req.header('user-agent') || null, + }); + + return c.json({ + token: sessionId, + user: { + id: user[0].id, + email: user[0].email, + name: user[0].name, + }, + }); +}); + +authRoute.post('/logout', async (c) => { + const token = c.req.header('Authorization')?.split(' ')[1]; + if (token) { + await db.delete(sessions).where(eq(sessions.id, token)); + } + + return c.json({ message: 
'Logged out' }); +}); + +export { authRoute }; +`; + +const AUTH_MIDDLEWARE_FILE = `import { and, eq, gt } from 'drizzle-orm'; +import { createMiddleware } from 'hono/factory'; +import { db } from '../db'; +import { sessions, users } from '../db/schema'; + +export interface AuthContext { + user: { + id: string; + email: string; + name: string | null; + }; +} + +function getSessionToken(authHeader: string | undefined): string | null { + if (!authHeader) return null; + + const parts = authHeader.split(' '); + if (parts.length !== 2 || parts[0] !== 'Bearer') return null; + + return parts[1]; +} + +async function validateSession(token: string): Promise { + const session = await db + .select() + .from(sessions) + .where(and(eq(sessions.id, token), gt(sessions.expiresAt, new Date()))) + .limit(1); + + if (session.length === 0) return null; + + const user = await db.select().from(users).where(eq(users.id, session[0].userId)).limit(1); + return user.length > 0 ? user[0] : null; +} + +export const requireAuth = createMiddleware<{ Variables: AuthContext }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (!token) { + return c.json({ error: 'Unauthorized: No token provided' }, 401); + } + + const user = await validateSession(token); + if (!user) { + return c.json({ error: 'Unauthorized: Invalid or expired token' }, 401); + } + + c.set('user', user); + await next(); +}); + +export const optionalAuth = createMiddleware<{ Variables: Partial }>(async (c, next) => { + const token = getSessionToken(c.req.header('Authorization')); + + if (token) { + const user = await validateSession(token); + if (user) { + c.set('user', user); + } + } + + await next(); +}); + +export function getUser(c: { get: (key: 'user') => AuthContext['user'] }): AuthContext['user'] { + return c.get('user'); +} +`; + +function appendIfMissing(filePath: string, marker: string, content: string): void { + const current = readFileSync(filePath, 'utf-8'); + if 
(current.includes(marker)) { + return; + } + + const next = current.trimEnd() + '\n\n' + content.trim() + '\n'; + writeFileSync(filePath, next); +} + +function ensurePasswordHashColumn(schemaPath: string): void { + const current = readFileSync(schemaPath, 'utf-8'); + if (/passwordHash\s*:\s*text\('password_hash'\)/.test(current)) { + return; + } + + const usersBlock = current.match(/export\s+const\s+users\s*=\s*sqliteTable\([^]+?\}\);/m); + if (!usersBlock) { + logger.warn('Could not find sqlite users table block; skipping passwordHash injection.'); + return; + } + + const replacement = usersBlock[0].replace(/\n\}\);$/, "\n passwordHash: text('password_hash').notNull(),\n});"); + writeFileSync(schemaPath, current.replace(usersBlock[0], replacement)); +} + +function ensureAuthInConfig(projectRoot: string): void { + const configPath = path.join(projectRoot, 'betterbase.config.ts'); + if (!existsSync(configPath)) return; + + const current = readFileSync(configPath, 'utf-8'); + if (current.includes('auth: {')) return; + + const patterns = [ + { + regex: /export\s+default\s+\{/, + replace: `export default {\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + }, + { + regex: /export\s+default\s+defineConfig\s*\(\s*\{/, + replace: `export default defineConfig({\n auth: {\n enabled: true,\n secret: process.env.AUTH_SECRET,\n sessionDuration: 30 * 24 * 60 * 60,\n },`, + }, + ]; + + let updated = current; + for (const pattern of patterns) { + if (pattern.regex.test(updated)) { + updated = updated.replace(pattern.regex, pattern.replace); + break; + } + } + + if (updated !== current) { + writeFileSync(configPath, updated); + } else { + logger.warn(`Could not automatically patch auth config in ${configPath}. 
Please add auth config manually.`); + } +} + +function ensureEnvVar(projectRoot: string): void { + const envPath = path.join(projectRoot, '.env.example'); + if (!existsSync(envPath)) return; + + const env = readFileSync(envPath, 'utf-8'); + if (env.includes('AUTH_SECRET=')) return; + + writeFileSync(envPath, `${env.trimEnd()}\n\n# Auth\nAUTH_SECRET=your-secret-key-here-change-in-production\n`); +} + +function ensureRoutesIndexHook(projectRoot: string): void { + const routesIndexPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routesIndexPath)) return; + + let current = readFileSync(routesIndexPath, 'utf-8'); + + if (!current.includes("import { authRoute } from './auth';")) { + current = current.replace("import { usersRoute } from './users';", "import { usersRoute } from './users';\nimport { authRoute } from './auth';"); + } + + if (!current.includes("app.route('/auth', authRoute);")) { + current = current.replace("app.route('/api/users', usersRoute);", "app.route('/api/users', usersRoute);\n app.route('/auth', authRoute);"); + } + + writeFileSync(routesIndexPath, current); +} + +export async function runAuthSetupCommand(projectRoot: string = process.cwd()): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + const middlewarePath = path.join(resolvedRoot, 'src/middleware/auth.ts'); + const routePath = path.join(resolvedRoot, 'src/routes/auth.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Could not find schema file at ${schemaPath}. 
Run this command from a BetterBase app root.`); + } + + logger.info('šŸ” Setting up authentication...'); + + logger.info('šŸ“¦ Installing better-auth...'); + execSync('bun add better-auth', { cwd: resolvedRoot, stdio: 'inherit' }); + + logger.info('šŸ“ Adding auth tables to schema...'); + ensurePasswordHashColumn(schemaPath); + appendIfMissing(schemaPath, "export const sessions = sqliteTable('sessions'", AUTH_SCHEMA_BLOCK); + + logger.info('šŸ›”ļø Creating auth middleware...'); + mkdirSync(path.dirname(middlewarePath), { recursive: true }); + if (!existsSync(middlewarePath)) { + writeFileSync(middlewarePath, AUTH_MIDDLEWARE_FILE); + } else { + logger.warn(`Skipping existing middleware file: ${middlewarePath}`); + } + + logger.info('🧭 Creating auth routes...'); + mkdirSync(path.dirname(routePath), { recursive: true }); + if (!existsSync(routePath)) { + writeFileSync(routePath, AUTH_ROUTE_FILE); + } else { + logger.warn(`Skipping existing route file: ${routePath}`); + } + ensureRoutesIndexHook(resolvedRoot); + + logger.info('āš™ļø Updating config...'); + ensureAuthInConfig(resolvedRoot); + ensureEnvVar(resolvedRoot); + + logger.success('Authentication setup complete!'); + logger.info('Next steps:'); + logger.info('1. Set AUTH_SECRET in .env'); + logger.info('2. Run: bun run db:push'); + logger.info('3. 
Use requireAuth middleware on protected routes'); +} diff --git a/betterbase/packages/cli/src/commands/dev.ts b/betterbase/packages/cli/src/commands/dev.ts new file mode 100644 index 0000000..762a021 --- /dev/null +++ b/betterbase/packages/cli/src/commands/dev.ts @@ -0,0 +1,65 @@ +import { existsSync, watch, type FSWatcher } from 'node:fs'; +import path from 'node:path'; +import { ContextGenerator } from '../utils/context-generator'; +import * as logger from '../utils/logger'; + +export async function runDevCommand(projectRoot: string = process.cwd()): Promise<() => void> { + const generator = new ContextGenerator(); + + await generator.generate(projectRoot); + + const watchPaths = [path.join(projectRoot, 'src/db/schema.ts'), path.join(projectRoot, 'src/routes')]; + const timers = new Map>(); + const watchers: FSWatcher[] = []; + + for (const watchPath of watchPaths) { + if (!existsSync(watchPath)) { + logger.warn(`Watch path does not exist; skipping: ${watchPath}`); + continue; + } + + try { + const watcher = watch(watchPath, { recursive: true }, (_eventType, filename) => { + logger.info(`File changed: ${String(filename ?? '')}`); + + const existing = timers.get(watchPath); + if (existing) { + clearTimeout(existing); + } + + const timer = setTimeout(async () => { + logger.info('Regenerating context...'); + const start = Date.now(); + + try { + await generator.generate(projectRoot); + logger.info(`Context updated in ${Date.now() - start}ms`); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.error(`Failed to regenerate context: ${message}`); + } + }, 250); + + timers.set(watchPath, timer); + }); + + watchers.push(watcher); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + logger.warn(`Failed to watch path ${watchPath}: ${message}`); + } + } + + logger.info('Watching for schema and route changes...'); + + return () => { + for (const timer of timers.values()) { + clearTimeout(timer); + } + timers.clear(); + + for (const watcher of watchers) { + watcher.close(); + } + }; +} diff --git a/betterbase/packages/cli/src/commands/generate.ts b/betterbase/packages/cli/src/commands/generate.ts new file mode 100644 index 0000000..1735e30 --- /dev/null +++ b/betterbase/packages/cli/src/commands/generate.ts @@ -0,0 +1,252 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { SchemaScanner, type TableInfo } from '../utils/schema-scanner'; +import * as logger from '../utils/logger'; + +function toSingular(name: string): string { + const lower = name.toLowerCase(); + const invariants = new Set(['status', 'news', 'series']); + if (invariants.has(lower)) { + return name; + } + + if (/men$/i.test(name)) { + return name.replace(/men$/i, 'man'); + } + + if (/ies$/i.test(name)) { + return name.replace(/ies$/i, 'y'); + } + + if (/(ses|xes|zes|ches|shes)$/i.test(name)) { + return name.replace(/es$/i, ''); + } + + if (name.endsWith('s') && !name.endsWith('ss')) { + return name.slice(0, -1); + } + + return `${name}Item`; +} + +function schemaTypeToZod(type: string): string { + if (type === 'integer' || type === 'number') return 'z.coerce.number()'; + if (type === 'boolean') return 'z.coerce.boolean()'; + if (type === 'json') return 'z.unknown()'; + if (type === 'datetime') return 'z.coerce.date()'; + return 'z.string()'; +} + +function buildSchemaShape(table: TableInfo, mode: 'create' | 'update'): string { + return Object.entries(table.columns) + .filter(([columnName, column]) => !(column.primaryKey || columnName === 'id')) + .map(([columnName, column]) => { + const base = schemaTypeToZod(column.type); + const optional = mode === 'update' || column.nullable || 
Boolean(column.defaultValue); + return ` ${columnName}: ${optional ? `${base}.optional()` : base}`; + }) + .join(',\n'); +} + +function generateRouteFile(tableName: string, table: TableInfo): string { + const singular = toSingular(tableName); + const createShape = buildSchemaShape(table, 'create'); + const updateShape = buildSchemaShape(table, 'update'); + + return `import { and, asc, desc, eq } from 'drizzle-orm'; +import { Hono } from 'hono'; +import { zValidator } from '@hono/zod-validator'; +import { z } from 'zod'; +import { db } from '../db'; +import { realtime } from '../lib/realtime'; +import { ${tableName} } from '../db/schema'; + +export const ${tableName}Route = new Hono(); + +const createSchema = z.object({ +${createShape} +}); + +const updateSchema = z.object({ +${updateShape} +}); + +${tableName}Route.get('/', async (c) => { + const limit = Number(c.req.query('limit') ?? 50); + const offset = Number(c.req.query('offset') ?? 0); + const safeLimit = Number.isFinite(limit) && limit >= 0 ? Math.min(limit, 100) : 50; + const safeOffset = Number.isFinite(offset) && offset >= 0 ? offset : 0; + + const queryParams = c.req.query(); + const sort = queryParams.sort; + + const filters = Object.entries(queryParams).filter(([key, value]) => key !== 'limit' && key !== 'offset' && key !== 'sort' && value !== undefined); + + let query = db.select().from(${tableName}).$dynamic(); + + if (filters.length > 0) { + const conditions = filters + .filter(([key]) => key in ${tableName}) + .map(([key, value]) => eq(${tableName}[key as keyof typeof ${tableName}] as never, value as never)); + + if (conditions.length > 0) { + query = query.where(and(...conditions)); + } + } + + if (sort) { + const [field, order] = sort.split(':'); + if (field && field in ${tableName}) { + const column = ${tableName}[field as keyof typeof ${tableName}] as never; + query = query.orderBy(order === 'desc' ? 
desc(column) : asc(column)); + } + } + + const items = await query.limit(safeLimit).offset(safeOffset); + return c.json({ ${tableName}: items, count: items.length, pagination: { limit: safeLimit, offset: safeOffset } }); +}); + +${tableName}Route.get('/:id', async (c) => { + const id = c.req.param('id'); + const item = await db.select().from(${tableName}).where(eq(${tableName}.id, id as never)).limit(1); + + if (item.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + return c.json({ ${singular}: item[0] }); +}); + +${tableName}Route.post('/', zValidator('json', createSchema), async (c) => { + const body = c.req.valid('json'); + const created = await db.insert(${tableName}).values(body).returning(); + realtime.broadcast('${tableName}', 'INSERT', created[0]); + return c.json({ ${singular}: created[0] }, 201); +}); + +${tableName}Route.patch('/:id', zValidator('json', updateSchema), async (c) => { + const id = c.req.param('id'); + const body = c.req.valid('json'); + + const updated = await db.update(${tableName}).set(body).where(eq(${tableName}.id, id as never)).returning(); + if (updated.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + realtime.broadcast('${tableName}', 'UPDATE', updated[0]); + return c.json({ ${singular}: updated[0] }); +}); + +${tableName}Route.delete('/:id', async (c) => { + const id = c.req.param('id'); + const deleted = await db.delete(${tableName}).where(eq(${tableName}.id, id as never)).returning(); + + if (deleted.length === 0) { + return c.json({ error: '${tableName} not found' }, 404); + } + + realtime.broadcast('${tableName}', 'DELETE', { id }); + return c.json({ message: '${singular} deleted', ${singular}: deleted[0] }); +}); +`; +} + +function updateMainRouter(projectRoot: string, tableName: string): void { + const routerPath = path.join(projectRoot, 'src/routes/index.ts'); + if (!existsSync(routerPath)) { + logger.warn(`Routes index not found at ${routerPath}. 
Please wire the route manually.`); + return; + } + + let router = readFileSync(routerPath, 'utf-8'); + const importLine = `import { ${tableName}Route } from './${tableName}';`; + const routeLine = ` app.route('/api/${tableName}', ${tableName}Route);`; + + if (!router.includes(importLine)) { + const firstRouteImport = /import\s+\{\s*healthRoute\s*\}\s+from\s+'\.\/health';/; + router = firstRouteImport.test(router) ? router.replace(firstRouteImport, (m) => `${m}\n${importLine}`) : `${importLine}\n${router}`; + } + + if (!router.includes(routeLine)) { + const routeStatements = [...router.matchAll(/\s*app\.route\([^\n]+\);/g)]; + if (routeStatements.length > 0) { + const last = routeStatements[routeStatements.length - 1]; + const insertAt = (last.index ?? 0) + last[0].length; + router = `${router.slice(0, insertAt)}\n${routeLine}${router.slice(insertAt)}`; + } else { + router = router.replace(/\n}\s*$/, `\n${routeLine}\n}`); + } + } + + writeFileSync(routerPath, router); +} + +function ensureRealtimeUtility(projectRoot: string): void { + const realtimePath = path.join(projectRoot, 'src/lib/realtime.ts'); + if (existsSync(realtimePath)) return; + + const canonicalRealtimePath = path.resolve(import.meta.dir, '../../../templates/base/src/lib/realtime.ts'); + if (!existsSync(canonicalRealtimePath)) { + throw new Error(`Canonical realtime template not found at ${canonicalRealtimePath}`); + } + + mkdirSync(path.dirname(realtimePath), { recursive: true }); + writeFileSync(realtimePath, readFileSync(canonicalRealtimePath, 'utf-8')); +} + +async function ensureZodValidatorInstalled(projectRoot: string): Promise { + logger.info('Installing @hono/zod-validator...'); + const process = Bun.spawn(['bun', 'add', '@hono/zod-validator'], { + cwd: projectRoot, + stdout: 'pipe', + stderr: 'pipe', + }); + + const [exitCode, stdout, stderr] = await Promise.all([ + process.exited, + new Response(process.stdout).text(), + new Response(process.stderr).text(), + ]); + + if (exitCode !== 0) { + 
if (stdout.trim()) logger.warn(stdout.trim()); + if (stderr.trim()) logger.error(stderr.trim()); + throw new Error('Failed to install @hono/zod-validator.'); + } +} + +export async function runGenerateCrudCommand(projectRoot: string, tableName: string): Promise { + const resolvedRoot = path.resolve(projectRoot); + const schemaPath = path.join(resolvedRoot, 'src/db/schema.ts'); + + if (!existsSync(schemaPath)) { + throw new Error(`Schema file not found at ${schemaPath}`); + } + + logger.info(`Generating CRUD for ${tableName}...`); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + const table = tables[tableName]; + if (!table) { + throw new Error(`Table "${tableName}" not found in schema.`); + } + + await ensureZodValidatorInstalled(resolvedRoot); + ensureRealtimeUtility(resolvedRoot); + + const routesDir = path.join(resolvedRoot, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + const routePath = path.join(routesDir, `${tableName}.ts`); + writeFileSync(routePath, generateRouteFile(tableName, table)); + + updateMainRouter(resolvedRoot, tableName); + + logger.success(`Generated ${routePath}`); + logger.info(`GET /api/${tableName}`); + logger.info(`GET /api/${tableName}/:id`); + logger.info(`POST /api/${tableName}`); + logger.info(`PATCH /api/${tableName}/:id`); + logger.info(`DELETE /api/${tableName}/:id`); +} diff --git a/betterbase/packages/cli/src/commands/init.ts b/betterbase/packages/cli/src/commands/init.ts index 5859b89..0e7c6ac 100644 --- a/betterbase/packages/cli/src/commands/init.ts +++ b/betterbase/packages/cli/src/commands/init.ts @@ -63,8 +63,8 @@ async function initializeGitRepository(projectPath: string): Promise { function buildPackageJson(projectName: string, databaseMode: DatabaseMode, useAuth: boolean): string { const dependencies: Record = { hono: '^4.11.9', - 'drizzle-orm': '^0.44.5', - zod: '^3.25.76', + 'drizzle-orm': '^0.45.1', + zod: '^4.3.6', }; if (databaseMode === 'turso') { @@ -338,19 
+338,20 @@ import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; import { healthRoute } from './health'; import { usersRoute } from './users'; +import { env } from '../lib/env'; -export default function registerRoutes(app: Hono): void { +export function registerRoutes(app: Hono): void { app.use('*', cors()); app.use('*', logger()); app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? err.status : 500, @@ -374,6 +375,23 @@ async function writeProjectFiles( await mkdir(path.join(projectPath, 'src/middleware'), { recursive: true }); await mkdir(path.join(projectPath, 'src/lib'), { recursive: true }); + + await writeFile( + path.join(projectPath, 'src/lib/env.ts'), + `import { z } from 'zod'; + +export const DEFAULT_DB_PATH = 'local.db'; + +const envSchema = z.object({ + NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), + PORT: z.coerce.number().int().positive().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), +}); + +export const env = envSchema.parse(process.env); +`, + ); + await writeFile( path.join(projectPath, 'betterbase.config.ts'), `export default { @@ -427,7 +445,6 @@ bun.lockb .env.* !.env.example local.db -.drizzle `, ); @@ -447,7 +464,7 @@ export const healthRoute = new Hono(); healthRoute.get('/', async (c) => { try { - await db.run(sql\`select 1\`); + await db.${databaseMode === 'local' ? 
'run' : 'execute'}(sql\`select 1\`); return c.json({ status: 'healthy', @@ -471,9 +488,9 @@ healthRoute.get('/', async (c) => { await writeFile( path.join(projectPath, 'src/middleware/validation.ts'), `import { HTTPException } from 'hono/http-exception'; -import type { ZodType } from 'zod'; +import { z } from 'zod'; -export function parseBody(schema: ZodType, body: unknown): T { +export function parseBody(schema: S, body: unknown): z.output { const result = schema.safeParse(body); if (!result.success) { @@ -510,9 +527,60 @@ const createUserSchema = z.object({ export const usersRoute = new Hono(); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +function parseNonNegativeInt(value: string | undefined, fallback: number): number { + if (!value) { + return fallback; + } + + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed < 0) { + return fallback; + } + + return parsed; +} + usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + const requestedLimit = parseNonNegativeInt(c.req.query('limit'), DEFAULT_LIMIT); + const limit = Math.min(requestedLimit, MAX_LIMIT); + const offset = parseNonNegativeInt(c.req.query('offset'), DEFAULT_OFFSET); + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + hasMore: false, + }, + }); + } + + try { + const rows = await db.select().from(users).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { @@ -545,15 +613,16 @@ usersRoute.post('/', async (c) => { await writeFile( path.join(projectPath, 'src/index.ts'), `import { Hono } from 
'hono'; -import registerRoutes from './routes'; +import { env } from './lib/env'; +import { registerRoutes } from './routes'; const app = new Hono(); registerRoutes(app); const server = Bun.serve({ fetch: app.fetch, - port: Number(process.env.PORT ?? 3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(\`šŸš€ Server running at http://localhost:\${server.port}\`); diff --git a/betterbase/packages/cli/src/commands/migrate.ts b/betterbase/packages/cli/src/commands/migrate.ts index 602915b..af22934 100644 --- a/betterbase/packages/cli/src/commands/migrate.ts +++ b/betterbase/packages/cli/src/commands/migrate.ts @@ -1,32 +1,344 @@ +import { Database } from 'bun:sqlite'; +import chalk from 'chalk'; +import { access, mkdir, readdir } from 'node:fs/promises'; +import path from 'node:path'; import { z } from 'zod'; +import { DEFAULT_DB_PATH } from '../constants'; import * as logger from '../utils/logger'; import * as prompts from '../utils/prompts'; const migrateOptionsSchema = z.object({ - destructive: z.boolean().optional(), + preview: z.boolean().optional(), + production: z.boolean().optional(), }); export type MigrateCommandOptions = z.infer; -/** - * Run the `bb migrate` command. - */ +export type MigrationChangeType = 'create_table' | 'add_column' | 'modify_column' | 'drop_column' | 'drop_table'; + +export interface MigrationChange { + type: MigrationChangeType; + table: string; + column?: string; + detail?: string; + isDestructive: boolean; +} + +interface DrizzleResult { + success: boolean; + stdout: string; + stderr: string; +} + +interface MigrationBackup { + sourcePath: string; + backupPath: string; +} + +const DRIZZLE_DIR = 'drizzle'; +const DRIZZLE_TIMEOUT_MS = 30_000; + +function captureIdentifier(match: RegExpMatchArray, startIndex: number): string { + return match[startIndex] ?? match[startIndex + 1] ?? match[startIndex + 2] ?? 
''; +} + +async function runDrizzleKit(args: string[]): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), DRIZZLE_TIMEOUT_MS); + + const proc = Bun.spawn(['bunx', 'drizzle-kit', ...args], { + cwd: process.cwd(), + stdout: 'pipe', + stderr: 'pipe', + signal: controller.signal, + }); + + try { + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + return { success: exitCode === 0, stdout, stderr }; + } catch { + return { + success: false, + stdout: '', + stderr: `drizzle-kit ${args.join(' ')} timed out after ${DRIZZLE_TIMEOUT_MS / 1000}s`, + }; + } finally { + clearTimeout(timeout); + } +} + +async function listSqlFiles(baseDir: string): Promise> { + const entries = new Map(); + const root = path.join(process.cwd(), baseDir); + + const walk = async (dir: string): Promise => { + try { + await access(dir); + } catch { + return; + } + + for (const entry of await readdir(dir, { withFileTypes: true })) { + const fullPath = path.join(dir, entry.name); + + if (entry.isDirectory()) { + await walk(fullPath); + continue; + } + + if (!fullPath.endsWith('.sql')) { + continue; + } + + entries.set(path.relative(root, fullPath), await Bun.file(fullPath).text()); + } + }; + + await walk(root); + return entries; +} + +function analyzeMigration(sqlStatements: string[]): MigrationChange[] { + const changes: MigrationChange[] = []; + const ident = '(?:"([^"]+)"|`([^`]+)`|([\\w.-]+))'; + + for (const statement of sqlStatements) { + const sql = statement.trim(); + if (!sql) continue; + + const createTable = sql.match(new RegExp(`create\\s+table(?:\\s+if\\s+not\\s+exists)?\\s+${ident}`, 'i')); + if (createTable) { + changes.push({ type: 'create_table', table: captureIdentifier(createTable, 1), isDestructive: false, detail: sql }); + continue; + } + + const dropTable = sql.match(new 
RegExp(`drop\\s+table(?:\\s+if\\s+exists)?\\s+${ident}`, 'i')); + if (dropTable) { + changes.push({ type: 'drop_table', table: captureIdentifier(dropTable, 1), isDestructive: true, detail: sql }); + continue; + } + + const addColumn = sql.match( + new RegExp(`alter\\s+table(?:\\s+if\\s+exists)?\\s+${ident}\\s+add\\s+column(?:\\s+if\\s+not\\s+exists)?\\s+${ident}`, 'i'), + ); + if (addColumn) { + changes.push({ + type: 'add_column', + table: captureIdentifier(addColumn, 1), + column: captureIdentifier(addColumn, 4), + isDestructive: false, + detail: sql, + }); + continue; + } + + const dropColumn = sql.match( + new RegExp(`alter\\s+table(?:\\s+if\\s+exists)?\\s+${ident}\\s+drop\\s+column(?:\\s+if\\s+exists)?\\s+${ident}`, 'i'), + ); + if (dropColumn) { + changes.push({ + type: 'drop_column', + table: captureIdentifier(dropColumn, 1), + column: captureIdentifier(dropColumn, 4), + isDestructive: true, + detail: sql, + }); + continue; + } + + const alterColumn = sql.match( + new RegExp( + `alter\\s+table(?:\\s+if\\s+exists)?\\s+${ident}\\s+(?:alter\\s+column\\s+${ident}|rename\\s+column\\s+${ident})`, + 'i', + ), + ); + if (alterColumn) { + changes.push({ + type: 'modify_column', + table: captureIdentifier(alterColumn, 1), + column: captureIdentifier(alterColumn, 4) || captureIdentifier(alterColumn, 7), + isDestructive: /drop\s+not\s+null|set\s+not\s+null|set\s+data\s+type|rename\s+column/i.test(sql), + detail: sql, + }); + } + } + + return changes; +} + +function displayDiff(changes: MigrationChange[]): void { + console.log('\nšŸ“Š Migration Preview\n'); + + if (changes.length === 0) { + console.log(chalk.gray('No schema changes detected.')); + return; + } + + const newTables = changes.filter((c) => c.type === 'create_table'); + const newColumns = changes.filter((c) => c.type === 'add_column'); + const modified = changes.filter((c) => c.type === 'modify_column'); + const destructive = changes.filter((c) => c.isDestructive); + + if (newTables.length) { + 
console.log(chalk.green('āœ… New Tables:')); + newTables.forEach((change) => console.log(chalk.green(` + ${change.table}`))); + console.log(''); + } + + if (newColumns.length) { + console.log(chalk.green('āœ… New Columns:')); + newColumns.forEach((change) => console.log(chalk.green(` + ${change.table}.${change.column ?? ''}`))); + console.log(''); + } + + if (modified.length) { + console.log(chalk.yellow('āš ļø Modified Columns:')); + modified.forEach((change) => console.log(chalk.yellow(` ! ${change.table}.${change.column ?? ''}`))); + console.log(''); + } + + if (destructive.length) { + console.log(chalk.red('āŒ Destructive Changes:')); + destructive.forEach((change) => { + console.log(chalk.red(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + console.log(chalk.red(' āš ļø This will DELETE DATA')); + }); + console.log(''); + } +} + +async function confirmDestructive(changes: MigrationChange[]): Promise { + const destructive = changes.filter((c) => c.isDestructive); + if (destructive.length === 0) return true; + + logger.warn('DESTRUCTIVE CHANGES DETECTED:'); + destructive.forEach((change) => console.log(` - ${change.type}: ${change.table}${change.column ? `.${change.column}` : ''}`)); + + const confirmation = await prompts.text({ message: 'Type "delete data" to confirm:' }); + if (confirmation !== 'delete data') { + logger.warn('Confirmation phrase mismatch. Migration cancelled.'); + return false; + } + + return true; +} + +async function backupDatabase(): Promise { + const sourcePath = process.env.DB_PATH ?? 
DEFAULT_DB_PATH; + + try { + await access(sourcePath); + } catch { + logger.warn(`No local database found at ${sourcePath}; skipping backup.`); + return null; + } + + const timestamp = new Date().toISOString().replace(/:/g, '-'); + const backupDir = path.join(process.cwd(), 'backups'); + await mkdir(backupDir, { recursive: true }); + + const backupPath = path.join(backupDir, `db-${timestamp}.sqlite`); + + const db = new Database(sourcePath, { readonly: true }); + try { + await Bun.write(backupPath, db.serialize()); + } finally { + db.close(); + } + + logger.success(`Backup saved: ${backupPath}`); + return { sourcePath, backupPath }; +} + +async function restoreBackup(backup: MigrationBackup | null): Promise { + if (backup === null) return; + const bytes = await Bun.file(backup.backupPath).bytes(); + await Bun.write(backup.sourcePath, bytes); + logger.warn(`Rollback complete. Restored database from ${backup.backupPath}`); +} + +function splitStatements(sql: string): string[] { + return sql + .split(/;\s*/g) + .map((statement) => statement.trim()) + .filter((statement) => statement.length > 0); +} + +async function collectChangesFromGenerate(): Promise { + const before = await listSqlFiles(DRIZZLE_DIR); + const generate = await runDrizzleKit(['generate']); + + if (!generate.success) { + if (/conflict|merge/i.test(generate.stderr)) { + throw new Error(`Migration conflict detected. Resolve migration files manually.\n${generate.stderr}`); + } + + throw new Error(`Failed to generate migrations.\n${generate.stderr || generate.stdout}`); + } + + const after = await listSqlFiles(DRIZZLE_DIR); + const changedSql: string[] = []; + + for (const [relativePath, content] of after.entries()) { + const previous = before.get(relativePath); + if (previous === content) continue; + + // Intentionally analyze full changed file content: drizzle-kit typically creates new migration files, + // so whole-file analysis is simpler and reliable. 
If in-place edits become common, switch to a true diff. + changedSql.push(...splitStatements(content)); + } + + return analyzeMigration(changedSql); +} + export async function runMigrateCommand(rawOptions: MigrateCommandOptions): Promise { const options = migrateOptionsSchema.parse(rawOptions); - const shouldContinue = - options.destructive === true - ? true - : await prompts.confirm({ - message: 'This migration may include destructive changes. Continue?', - initial: false, - }); + logger.info('Generating migration files with drizzle-kit...'); + const changes = await collectChangesFromGenerate(); + displayDiff(changes); - if (!shouldContinue) { - logger.warn('Migration cancelled by user.'); + if (options.preview) { + logger.info('Preview mode enabled. No migrations applied.'); return; } - logger.info('Analyzing migration plan...'); - logger.success('Migration scaffold complete. (Placeholder implementation)'); + if (options.production) { + const proceed = await prompts.confirm({ message: 'Apply migrations to production now?', initial: false }); + if (!proceed) { + logger.warn('Migration cancelled by user.'); + return; + } + } + + let backup: MigrationBackup | null = null; + if (changes.some((change) => change.isDestructive)) { + backup = await backupDatabase(); + const confirmed = await confirmDestructive(changes); + if (!confirmed) return; + } + + logger.info('Applying migrations with drizzle-kit push...'); + const push = await runDrizzleKit(['push']); + + if (!push.success) { + await restoreBackup(backup); + + if (/\b(?:connect(?:ion)?|econnrefused|econnreset|enotfound|etimedout)\b/i.test(push.stderr)) { + throw new Error(`Database connection failed while applying migration.\n${push.stderr}`); + } + + if (/conflict|merge/i.test(push.stderr)) { + throw new Error(`Migration conflict detected during push. 
Please resolve and retry.\n${push.stderr}`); + } + + throw new Error(`Migration push failed.\n${push.stderr || push.stdout}`); + } + + logger.success('Migration complete!'); } diff --git a/betterbase/packages/cli/src/constants.ts b/betterbase/packages/cli/src/constants.ts new file mode 100644 index 0000000..4190ff6 --- /dev/null +++ b/betterbase/packages/cli/src/constants.ts @@ -0,0 +1,2 @@ +// Keep in sync with templates/base/src/lib/env.ts DEFAULT_DB_PATH. +export const DEFAULT_DB_PATH = 'local.db'; diff --git a/betterbase/packages/cli/src/index.ts b/betterbase/packages/cli/src/index.ts index e0136da..25e8e04 100644 --- a/betterbase/packages/cli/src/index.ts +++ b/betterbase/packages/cli/src/index.ts @@ -1,6 +1,9 @@ import { Command, CommanderError } from 'commander'; import { runInitCommand } from './commands/init'; +import { runDevCommand } from './commands/dev'; import { runMigrateCommand } from './commands/migrate'; +import { runAuthSetupCommand } from './commands/auth'; +import { runGenerateCrudCommand } from './commands/generate'; import * as logger from './utils/logger'; import packageJson from '../package.json'; @@ -24,12 +27,57 @@ export function createProgram(): Command { await runInitCommand({ projectName }); }); + + program + .command('dev') + .description('Watch schema/routes and regenerate .betterbase-context.json') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runDevCommand(projectRoot); + }); + + + const auth = program.command('auth').description('Authentication helpers'); + + auth + .command('setup') + .description('Install and scaffold BetterAuth integration') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (projectRoot: string) => { + await runAuthSetupCommand(projectRoot); + }); + + + const generate = program.command('generate').description('Code generation helpers'); + + generate + .command('crud') + .description('Generate full 
CRUD routes for a table') + .argument('', 'table name from src/db/schema.ts') + .argument('[project-root]', 'project root directory', process.cwd()) + .action(async (tableName: string, projectRoot: string) => { + await runGenerateCrudCommand(projectRoot, tableName); + }); + program .command('migrate') - .description('Run BetterBase database migrations') - .option('--destructive', 'allow destructive migration flow') - .action(async (options: { destructive?: boolean }) => { - await runMigrateCommand({ destructive: options.destructive }); + .description('Generate and apply migrations for local development') + .action(async () => { + await runMigrateCommand({}); + }); + + program + .command('migrate:preview') + .description('Preview migration diff without applying changes') + .action(async () => { + await runMigrateCommand({ preview: true }); + }); + + program + .command('migrate:production') + .description('Apply migrations to production (requires confirmation)') + .action(async () => { + await runMigrateCommand({ production: true }); }); return program; diff --git a/betterbase/packages/cli/src/utils/context-generator.ts b/betterbase/packages/cli/src/utils/context-generator.ts new file mode 100644 index 0000000..794c8ee --- /dev/null +++ b/betterbase/packages/cli/src/utils/context-generator.ts @@ -0,0 +1,84 @@ +import { existsSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { RouteScanner, type RouteInfo } from './route-scanner'; +import { SchemaScanner, type TableInfo } from './schema-scanner'; +import * as logger from './logger'; + +export interface BetterBaseContext { + version: string; + generated_at: string; + tables: Record; + routes: Record; + ai_prompt: string; +} + +export class ContextGenerator { + async generate(projectRoot: string): Promise { + const schemaPath = path.join(projectRoot, 'src/db/schema.ts'); + const routesPath = path.join(projectRoot, 'src/routes'); + + let tables: Record = {}; + let routes: Record = {}; + + if 
(existsSync(schemaPath)) { + const schemaScanner = new SchemaScanner(schemaPath); + tables = schemaScanner.scan(); + } else { + logger.warn(`Schema file not found; continuing with empty tables: ${schemaPath}`); + } + + if (existsSync(routesPath)) { + const routeScanner = new RouteScanner(); + routes = routeScanner.scan(routesPath); + } else { + logger.warn(`Routes directory not found; continuing with empty routes: ${routesPath}`); + } + + const context: BetterBaseContext = { + version: '1.0.0', + generated_at: new Date().toISOString(), + tables, + routes, + ai_prompt: this.generateAIPrompt(tables, routes), + }; + + const outputPath = path.join(projectRoot, '.betterbase-context.json'); + writeFileSync(outputPath, `${JSON.stringify(context, null, 2)}\n`); + console.log(`āœ… Generated ${outputPath}`); + + return context; + } + + private generateAIPrompt(tables: Record, routes: Record): string { + const tableNames = Object.keys(tables); + const routeCount = Object.values(routes).reduce((count, methods) => count + methods.length, 0); + + let prompt = `This is a BetterBase backend project with ${tableNames.length} tables and ${routeCount} API endpoints.\n\n`; + + prompt += 'DATABASE SCHEMA:\n'; + for (const tableName of tableNames) { + const table = tables[tableName]; + const columns = Object.keys(table.columns ?? {}).join(', '); + prompt += `- ${tableName}: ${columns}\n`; + if (table.relations.length > 0) { + prompt += ` Relations: ${table.relations.join(', ')}\n`; + } + } + + prompt += '\nAPI ENDPOINTS:\n'; + for (const [routePath, methods] of Object.entries(routes)) { + for (const route of methods) { + const auth = route.requiresAuth ? ' [AUTH REQUIRED]' : ''; + prompt += `- ${route.method} ${routePath}${auth}\n`; + } + } + + prompt += '\nWhen writing code for this project:\n'; + prompt += "1. Always import tables from '../db/schema'\n"; + prompt += '2. Use Drizzle ORM for database queries\n'; + prompt += '3. Validate inputs with Zod\n'; + prompt += '4. 
Return JSON responses with proper status codes\n'; + + return prompt; + } +} diff --git a/betterbase/packages/cli/src/utils/route-scanner.ts b/betterbase/packages/cli/src/utils/route-scanner.ts new file mode 100644 index 0000000..d5cd2c9 --- /dev/null +++ b/betterbase/packages/cli/src/utils/route-scanner.ts @@ -0,0 +1,165 @@ +import { readdirSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import * as ts from 'typescript'; + +export interface RouteInfo { + method: string; + path: string; + requiresAuth: boolean; + inputSchema?: string; + outputSchema?: string; +} + +function getStringLiteral(node: ts.Node | undefined): string { + if (!node) return ''; + if (ts.isStringLiteral(node) || ts.isNoSubstitutionTemplateLiteral(node)) { + return node.text; + } + return node.getText(); +} + +function isAuthLikeName(value: string): boolean { + return /\bauth\b/i.test(value) || /^auth/i.test(value) || /^(authMiddleware|requireAuth)$/i.test(value); +} + +function collectTsFiles(dir: string): string[] { + const files: string[] = []; + + const walk = (current: string): void => { + let entries: ReturnType; + try { + entries = readdirSync(current, { withFileTypes: true }); + } catch { + return; + } + + for (const entry of entries) { + const fullPath = path.join(current, entry.name); + if (entry.isDirectory()) { + walk(fullPath); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.ts') && !entry.name.endsWith('.d.ts')) { + files.push(fullPath); + } + } + }; + + walk(dir); + return files; +} + +export class RouteScanner { + scan(routesDir: string): Record { + const files = collectTsFiles(routesDir); + const routes: Record = {}; + + for (const file of files) { + const fileRoutes = this.scanFile(file); + for (const [routePath, entries] of Object.entries(fileRoutes)) { + routes[routePath] = [...(routes[routePath] ?? 
[]), ...entries]; + } + } + + return routes; + } + + private scanFile(filePath: string): Record { + const sourceCode = readFileSync(filePath, 'utf-8'); + const sourceFile = ts.createSourceFile(filePath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + + const routes: Record = {}; + const authIdentifiers = new Set(); + + const isAuthMiddlewareExpression = (expr: ts.Expression): boolean => { + if (ts.isIdentifier(expr)) { + return authIdentifiers.has(expr.text) || isAuthLikeName(expr.text); + } + + if (ts.isPropertyAccessExpression(expr)) { + const text = expr.getText(sourceFile); + return isAuthLikeName(text); + } + + return false; + }; + + const collectAuthIdentifiers = (node: ts.Node): void => { + if (!ts.isVariableStatement(node)) return; + + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) continue; + const initializer = declaration.initializer; + if (ts.isCallExpression(initializer) && ts.isIdentifier(initializer.expression)) { + if (initializer.expression.text === 'createMiddleware' || initializer.expression.text === 'requireAuth') { + authIdentifiers.add(declaration.name.text); + } + } + + if (isAuthLikeName(declaration.name.text)) { + authIdentifiers.add(declaration.name.text); + } + } + }; + + ts.forEachChild(sourceFile, collectAuthIdentifiers); + + const visit = (node: ts.Node): void => { + if (ts.isCallExpression(node) && ts.isPropertyAccessExpression(node.expression)) { + const method = node.expression.name.text.toLowerCase(); + const httpMethods = new Set(['get', 'post', 'put', 'patch', 'delete', 'options', 'head']); + + if (httpMethods.has(method)) { + const [pathArg, ...handlerArgs] = node.arguments; + const routePath = getStringLiteral(pathArg); + + let requiresAuth = false; + for (const arg of handlerArgs) { + if (isAuthMiddlewareExpression(arg)) { + requiresAuth = true; + break; + } + } + + const route: RouteInfo = { + method: method.toUpperCase(), + 
path: routePath, + requiresAuth, + inputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'input'), + outputSchema: this.findSchemaUsage(sourceFile, handlerArgs, 'output'), + }; + + if (!routes[routePath]) { + routes[routePath] = []; + } + + routes[routePath].push(route); + } + } + + ts.forEachChild(node, visit); + }; + + visit(sourceFile); + return routes; + } + + private findSchemaUsage(sourceFile: ts.SourceFile, args: readonly ts.Expression[], mode: 'input' | 'output'): string | undefined { + const text = args.map((arg) => arg.getText(sourceFile)).join('\n'); + + if (mode === 'input') { + const parseMatch = text.match(/([A-Za-z0-9_]+Schema)\.(safeParse|parse)\(/); + if (parseMatch) return parseMatch[1]; + const middlewareMatch = text.match(/parseBody\(([^,]+),/); + if (middlewareMatch) return middlewareMatch[1].trim(); + } + + if (mode === 'output') { + const outputMatch = text.match(/([A-Za-z0-9_]+Schema)\.(parse|safeParse)\([^)]*c\.json/); + if (outputMatch) return outputMatch[1]; + } + + return undefined; + } +} diff --git a/betterbase/packages/cli/src/utils/scanner.ts b/betterbase/packages/cli/src/utils/scanner.ts new file mode 100644 index 0000000..0dee86d --- /dev/null +++ b/betterbase/packages/cli/src/utils/scanner.ts @@ -0,0 +1,252 @@ +import { readFileSync } from 'node:fs'; +import * as ts from 'typescript'; + +export interface ColumnInfo { + name: string; + type: string; + nullable: boolean; + unique: boolean; + primaryKey: boolean; + defaultValue?: string; + references?: string; +} + +export interface TableInfo { + name: string; + columns: Record; + relations: string[]; + indexes: string[]; +} + +function unwrapExpression(expression: ts.Expression): ts.Expression { + let current = expression; + + while ( + ts.isParenthesizedExpression(current) || + ts.isAsExpression(current) || + ts.isTypeAssertionExpression(current) || + ts.isSatisfiesExpression(current) + ) { + current = (current as ts.ParenthesizedExpression | ts.AsExpression | ts.TypeAssertion 
| ts.SatisfiesExpression) + .expression; + } + + return current; +} + +function getCallName(call: ts.CallExpression): string { + if (ts.isIdentifier(call.expression)) { + return call.expression.text; + } + + if (ts.isPropertyAccessExpression(call.expression)) { + return call.expression.name.text; + } + + return ''; +} + +function getExpressionText(sourceFile: ts.SourceFile, node: ts.Node | undefined): string { + if (!node) { + return ''; + } + + return node.getText(sourceFile); +} + +export class SchemaScanner { + private readonly sourceFile: ts.SourceFile; + + constructor(schemaPath: string) { + let sourceCode: string; + + try { + sourceCode = readFileSync(schemaPath, 'utf-8'); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + throw new Error(`Failed to read schema file at ${schemaPath}: ${message}`); + } + + this.sourceFile = ts.createSourceFile(schemaPath, sourceCode, ts.ScriptTarget.Latest, true, ts.ScriptKind.TS); + } + + scan(): Record { + const tables: Record = {}; + + const visit = (node: ts.Node): void => { + if (ts.isVariableStatement(node)) { + for (const declaration of node.declarationList.declarations) { + if (!ts.isIdentifier(declaration.name) || !declaration.initializer) { + continue; + } + + const initializer = unwrapExpression(declaration.initializer); + if (!ts.isCallExpression(initializer)) { + continue; + } + + const functionName = getCallName(initializer); + if (functionName === 'sqliteTable' || functionName === 'pgTable' || functionName === 'mysqlTable') { + tables[declaration.name.text] = this.parseTable(initializer); + } + } + } + + ts.forEachChild(node, visit); + }; + + visit(this.sourceFile); + return tables; + } + + private parseTable(callExpression: ts.CallExpression): TableInfo { + const [nameArg, columnsArg, indexesArg] = callExpression.arguments; + const tableName = ts.isStringLiteral(nameArg) ? 
nameArg.text : getExpressionText(this.sourceFile, nameArg); + + const columns: Record = {}; + const relations: string[] = []; + + if (columnsArg && ts.isObjectLiteralExpression(columnsArg)) { + for (const property of columnsArg.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const columnName = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? property.name.text + : property.name.getText(this.sourceFile); + + const columnInfo = this.parseColumn(columnName, property.initializer); + columns[columnName] = columnInfo; + + if (columnInfo.references) { + relations.push(columnInfo.references); + } + } + } + + const indexes = this.parseIndexes(indexesArg); + + return { + name: tableName, + columns, + relations, + indexes, + }; + } + + private parseIndexes(indexesArg: ts.Expression | undefined): string[] { + if (!indexesArg) { + return []; + } + + const indexes: string[] = []; + const indexRoot = unwrapExpression(indexesArg); + + const collectFromObject = (obj: ts.ObjectLiteralExpression): void => { + for (const property of obj.properties) { + if (!ts.isPropertyAssignment(property)) { + continue; + } + + const value = unwrapExpression(property.initializer); + if (!ts.isCallExpression(value)) { + continue; + } + + const callName = getCallName(value); + if (callName === 'index' || callName === 'uniqueIndex') { + const key = ts.isIdentifier(property.name) + ? property.name.text + : ts.isStringLiteral(property.name) + ? 
property.name.text + : property.name.getText(this.sourceFile); + indexes.push(key); + } + } + }; + + if (ts.isArrowFunction(indexRoot) || ts.isFunctionExpression(indexRoot)) { + const body = indexRoot.body; + if (ts.isObjectLiteralExpression(body)) { + collectFromObject(body); + } + + if (ts.isBlock(body)) { + for (const statement of body.statements) { + if (!ts.isReturnStatement(statement) || !statement.expression) { + continue; + } + + const expression = unwrapExpression(statement.expression); + if (ts.isObjectLiteralExpression(expression)) { + collectFromObject(expression); + } + } + } + } + + return indexes; + } + + private parseColumn(columnName: string, expression: ts.Expression): ColumnInfo { + let type = 'unknown'; + let nullable = true; + let unique = false; + let primaryKey = false; + let defaultValue: string | undefined; + let references: string | undefined; + + let current = unwrapExpression(expression); + + while (ts.isCallExpression(current)) { + const methodName = getCallName(current); + + if (methodName === 'text' || methodName === 'varchar' || methodName === 'char') { + type = 'text'; + } else if (methodName === 'integer' || methodName === 'int' || methodName === 'bigint' || methodName === 'serial') { + type = 'integer'; + } else if (methodName === 'real' || methodName === 'numeric' || methodName === 'decimal' || methodName === 'doublePrecision') { + type = 'number'; + } else if (methodName === 'boolean') { + type = 'boolean'; + } else if (methodName === 'timestamp' || methodName === 'datetime') { + type = 'datetime'; + } else if (methodName === 'json' || methodName === 'jsonb') { + type = 'json'; + } else if (methodName === 'blob') { + type = 'blob'; + } else if (methodName === 'notNull') { + nullable = false; + } else if (methodName === 'unique') { + unique = true; + } else if (methodName === 'primaryKey') { + primaryKey = true; + nullable = false; + } else if (methodName.startsWith('default')) { + defaultValue = 
getExpressionText(this.sourceFile, current.arguments[0]); + } else if (methodName === 'references') { + references = getExpressionText(this.sourceFile, current.arguments[0]); + } + + if (ts.isPropertyAccessExpression(current.expression)) { + current = unwrapExpression(current.expression.expression); + continue; + } + + break; + } + + return { + name: columnName, + type, + nullable, + unique, + primaryKey, + defaultValue, + references, + }; + } +} diff --git a/betterbase/packages/cli/src/utils/schema-scanner.ts b/betterbase/packages/cli/src/utils/schema-scanner.ts new file mode 100644 index 0000000..ff8ea30 --- /dev/null +++ b/betterbase/packages/cli/src/utils/schema-scanner.ts @@ -0,0 +1,2 @@ +export { SchemaScanner } from './scanner'; +export type { ColumnInfo, TableInfo } from './scanner'; diff --git a/betterbase/packages/cli/test/context-generator.test.ts b/betterbase/packages/cli/test/context-generator.test.ts new file mode 100644 index 0000000..2499430 --- /dev/null +++ b/betterbase/packages/cli/test/context-generator.test.ts @@ -0,0 +1,107 @@ +import { mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { ContextGenerator } from '../src/utils/context-generator'; + +describe('ContextGenerator', () => { + test('creates .betterbase-context.json from schema and routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull(), + }); + `, + ); + + writeFileSync( + path.join(root, 'src/routes/index.ts'), + ` + import { Hono } from 
'hono'; + const app = new Hono(); + app.get('/health', (c) => c.json({ ok: true })); + export default app; + `, + ); + + const generator = new ContextGenerator(); + const context = await generator.generate(root); + + expect(context.tables.users).toBeDefined(); + expect(context.tables.users.columns.id).toBeDefined(); + expect(context.tables.users.columns.email).toBeDefined(); + expect(context.routes['/health']).toBeDefined(); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.tables.users.name).toBe('users'); + expect(file.tables.users.columns.id.type).toBe('text'); + expect(file.tables.users.columns.email.type).toBe('text'); + expect(Array.isArray(file.routes['/health'])).toBe(true); + expect(file.routes['/health'].length).toBeGreaterThan(0); + expect(file.routes['/health'][0].method).toBe('GET'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing routes directory with empty routes', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-routes-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + writeFileSync( + path.join(root, 'src/db/schema.ts'), + ` + import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; + export const users = sqliteTable('users', { id: text('id').primaryKey() }); + `, + ); + + const context = await new ContextGenerator().generate(root); + expect(context.routes).toEqual({}); + + const file = JSON.parse(readFileSync(path.join(root, '.betterbase-context.json'), 'utf-8')); + expect(file.routes).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles empty schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-empty-schema-')); + + try { + mkdirSync(path.join(root, 'src/db'), { recursive: true }); + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 
'src/db/schema.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); + + test('handles missing schema file with empty tables', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-context-no-schema-')); + + try { + mkdirSync(path.join(root, 'src/routes'), { recursive: true }); + writeFileSync(path.join(root, 'src/routes/index.ts'), 'export {};\n'); + + const context = await new ContextGenerator().generate(root); + expect(context.tables).toEqual({}); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/route-scanner.test.ts b/betterbase/packages/cli/test/route-scanner.test.ts new file mode 100644 index 0000000..9f56991 --- /dev/null +++ b/betterbase/packages/cli/test/route-scanner.test.ts @@ -0,0 +1,45 @@ +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { RouteScanner } from '../src/utils/route-scanner'; + +describe('RouteScanner', () => { + test('extracts hono routes with auth and schemas', async () => { + const root = mkdtempSync(path.join(tmpdir(), 'bb-routes-')); + + try { + const routesDir = path.join(root, 'src/routes'); + mkdirSync(routesDir, { recursive: true }); + + writeFileSync( + path.join(routesDir, 'users.ts'), + ` + import { Hono } from 'hono'; + import { z } from 'zod'; + import { authMiddleware } from '../middleware/auth'; + + const createUserSchema = z.object({ email: z.string().email() }); + export const users = new Hono(); + + users.get('/users', authMiddleware, (c) => c.json({ users: [] })); + users.post('/users', async (c) => { + const body = await c.req.json(); + createUserSchema.parse(body); + return c.json({ ok: true }); + }); + `, + ); + + const scanner = new 
RouteScanner(); + const routes = scanner.scan(routesDir); + + expect(routes['/users']).toBeDefined(); + expect(routes['/users'].length).toBe(2); + expect(routes['/users'][0].requiresAuth).toBe(true); + expect(routes['/users'][1].inputSchema).toBe('createUserSchema'); + } finally { + rmSync(root, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/scanner.test.ts b/betterbase/packages/cli/test/scanner.test.ts new file mode 100644 index 0000000..edf5594 --- /dev/null +++ b/betterbase/packages/cli/test/scanner.test.ts @@ -0,0 +1,61 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; +import { describe, expect, test } from 'bun:test'; +import { SchemaScanner } from '../src/utils/scanner'; + +describe('SchemaScanner', () => { + test('extracts tables, columns, relations, and indexes from drizzle schema', () => { + const dir = mkdtempSync(path.join(tmpdir(), 'bb-scanner-')); + + try { + const schemaPath = path.join(dir, 'schema.ts'); + writeFileSync( + schemaPath, + ` + import { sqliteTable, text, integer, index } from 'drizzle-orm/sqlite-core'; + + export const users = sqliteTable('users', { + id: text('id').primaryKey(), + email: text('email').notNull().unique(), + age: integer('age').default(18), + }, (table) => ({ + usersEmailIdx: index('users_email_idx').on(table.email), + })); + + export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + userId: text('user_id').notNull().references(() => users.id), + title: text('title').notNull(), + }); + + export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + postId: text('post_id').notNull().references(() => posts.id), + body: text('body'), + }); + `, + ); + + const scanner = new SchemaScanner(schemaPath); + const tables = scanner.scan(); + + expect(Object.keys(tables)).toEqual(['users', 'posts', 'comments']); + + expect(tables.users.name).toBe('users'); + 
expect(tables.users.columns.id.primaryKey).toBe(true); + expect(tables.users.columns.id.nullable).toBe(false); + expect(tables.users.columns.email.unique).toBe(true); + expect(tables.users.columns.age.defaultValue).toBe('18'); + expect(tables.users.indexes).toContain('usersEmailIdx'); + + expect(tables.posts.columns.userId.references).toBe('() => users.id'); + expect(tables.posts.relations).toContain('() => users.id'); + + expect(tables.comments.columns.postId.references).toBe('() => posts.id'); + expect(tables.comments.relations).toContain('() => posts.id'); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); +}); diff --git a/betterbase/packages/cli/test/smoke.test.ts b/betterbase/packages/cli/test/smoke.test.ts index f082470..5c49ed4 100644 --- a/betterbase/packages/cli/test/smoke.test.ts +++ b/betterbase/packages/cli/test/smoke.test.ts @@ -14,9 +14,42 @@ describe('cli', () => { expect(init?.registeredArguments[0]?.name()).toBe('project-name'); }); - test('registers migrate command', () => { + + + + test('registers generate crud command', () => { + const program = createProgram(); + const generate = program.commands.find((command) => command.name() === 'generate'); + expect(generate).toBeDefined(); + + const crud = generate?.commands.find((command) => command.name() === 'crud'); + expect(crud).toBeDefined(); + }); + + test('registers auth setup command', () => { + const program = createProgram(); + const auth = program.commands.find((command) => command.name() === 'auth'); + expect(auth).toBeDefined(); + + const setup = auth?.commands.find((command) => command.name() === 'setup'); + expect(setup).toBeDefined(); + }); + + test('registers dev command', () => { + const program = createProgram(); + const dev = program.commands.find((command) => command.name() === 'dev'); + expect(dev).toBeDefined(); + }); + + test('registers migrate commands', () => { const program = createProgram(); + const migrate = program.commands.find((command) => 
command.name() === 'migrate'); + const preview = program.commands.find((command) => command.name() === 'migrate:preview'); + const production = program.commands.find((command) => command.name() === 'migrate:production'); + expect(migrate).toBeDefined(); + expect(preview).toBeDefined(); + expect(production).toBeDefined(); }); }); diff --git a/betterbase/templates/base/README.md b/betterbase/templates/base/README.md index e6d4cf1..8d481e3 100644 --- a/betterbase/templates/base/README.md +++ b/betterbase/templates/base/README.md @@ -26,3 +26,15 @@ src/ betterbase.config.ts drizzle.config.ts ``` + + +## Quick Start + +- Install dependencies: `bun install` +- Start development server: `bun run dev` +- Generate Drizzle migrations: `bun run db:generate` +- Apply migrations locally: `bun run db:push` +- Build for production: `bun run build` +- Start production server: `bun run start` + +Environment variables are validated in `src/lib/env.ts` (`NODE_ENV`, `PORT`, `DB_PATH`). diff --git a/betterbase/templates/base/package.json b/betterbase/templates/base/package.json index e631611..c74eee0 100644 --- a/betterbase/templates/base/package.json +++ b/betterbase/templates/base/package.json @@ -6,7 +6,9 @@ "dev": "bun --hot run src/index.ts", "db:generate": "drizzle-kit generate", "db:push": "bun run src/db/migrate.ts", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "build": "bun build src/index.ts --outfile dist/index.js --target bun", + "start": "bun run dist/index.js" }, "dependencies": { "hono": "^4.6.10", diff --git a/betterbase/templates/base/src/db/index.ts b/betterbase/templates/base/src/db/index.ts index ddbbc3f..485c7b4 100644 --- a/betterbase/templates/base/src/db/index.ts +++ b/betterbase/templates/base/src/db/index.ts @@ -1,8 +1,10 @@ import { Database } from 'bun:sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; +import { env } from '../lib/env'; import * as schema from './schema'; -const dbPath = process.env.DB_PATH ?? 
'local.db';
-const sqlite = new Database(dbPath, { create: true });
+
+// env.DB_PATH is always present because env schema provides a default.
+const sqlite = new Database(env.DB_PATH, { create: true });
 
 export const db = drizzle(sqlite, { schema });
diff --git a/betterbase/templates/base/src/db/migrate.ts b/betterbase/templates/base/src/db/migrate.ts
index c590040..2bdd9bf 100644
--- a/betterbase/templates/base/src/db/migrate.ts
+++ b/betterbase/templates/base/src/db/migrate.ts
@@ -1,9 +1,10 @@
 import { Database } from 'bun:sqlite';
 import { drizzle } from 'drizzle-orm/bun-sqlite';
 import { migrate } from 'drizzle-orm/bun-sqlite/migrator';
+import { env } from '../lib/env';
 
 try {
-  const sqlite = new Database(process.env.DB_PATH ?? 'local.db', { create: true });
+  const sqlite = new Database(env.DB_PATH, { create: true });
   const db = drizzle(sqlite);
 
   migrate(db, { migrationsFolder: './drizzle' });
diff --git a/betterbase/templates/base/src/index.ts b/betterbase/templates/base/src/index.ts
index 4065ba5..b8f9534 100644
--- a/betterbase/templates/base/src/index.ts
+++ b/betterbase/templates/base/src/index.ts
@@ -1,13 +1,33 @@
 import { Hono } from 'hono';
+import { upgradeWebSocket } from 'hono/bun';
+import { env } from './lib/env';
+import { realtime } from './lib/realtime';
 import { registerRoutes } from './routes';
 
 const app = new Hono();
+
+app.get(
+  '/ws',
+  upgradeWebSocket(() => ({
+    onOpen(_event, ws) {
+      realtime.handleConnection(ws.raw);
+    },
+    onMessage(event, ws) {
+      const message = typeof event.data === 'string' ? event.data : event.data.toString();
+      realtime.handleMessage(ws.raw, message);
+    },
+    onClose(_event, ws) {
+      realtime.handleClose(ws.raw);
+    },
+  })),
+);
+
 registerRoutes(app);
 
 const server = Bun.serve({
   fetch: app.fetch,
-  port: Number(process.env.PORT ?? 
3000), - development: process.env.NODE_ENV === 'development', + port: env.PORT, + development: env.NODE_ENV === 'development', }); console.log(`šŸš€ Server running at http://localhost:${server.port}`); diff --git a/betterbase/templates/base/src/lib/env.ts b/betterbase/templates/base/src/lib/env.ts index c4a0b38..9967db2 100644 --- a/betterbase/templates/base/src/lib/env.ts +++ b/betterbase/templates/base/src/lib/env.ts @@ -1,8 +1,12 @@ import { z } from 'zod'; +// Keep in sync with packages/cli/src/constants.ts DEFAULT_DB_PATH. +export const DEFAULT_DB_PATH = 'local.db'; + const envSchema = z.object({ NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), - PORT: z.coerce.number().default(3000), + PORT: z.coerce.number().int().positive().default(3000), + DB_PATH: z.string().min(1).default(DEFAULT_DB_PATH), }); export const env = envSchema.parse(process.env); diff --git a/betterbase/templates/base/src/lib/realtime.ts b/betterbase/templates/base/src/lib/realtime.ts new file mode 100644 index 0000000..ace9c71 --- /dev/null +++ b/betterbase/templates/base/src/lib/realtime.ts @@ -0,0 +1,188 @@ +import type { ServerWebSocket } from 'bun'; + +export interface Subscription { + table: string; + filter?: Record; +} + +interface Client { + ws: ServerWebSocket; + subscriptions: Map; +} + +interface RealtimeUpdatePayload { + type: 'update'; + table: string; + event: 'INSERT' | 'UPDATE' | 'DELETE'; + data: unknown; + timestamp: string; +} + +const realtimeLogger = { + debug: (message: string): void => console.debug(`[realtime] ${message}`), + info: (message: string): void => console.info(`[realtime] ${message}`), + warn: (message: string): void => console.warn(`[realtime] ${message}`), +}; + +export class RealtimeServer { + private clients = new Map, Client>(); + private tableSubscribers = new Map>>(); + + handleConnection(ws: ServerWebSocket): void { + realtimeLogger.info('Client connected'); + this.clients.set(ws, { + ws, + subscriptions: new Map(), + 
}); + } + + handleMessage(ws: ServerWebSocket, rawMessage: string): void { + try { + const data = JSON.parse(rawMessage) as { type?: string; table?: string; filter?: Record }; + + if (!data.type || !data.table) { + this.safeSend(ws, { error: 'Message must include type and table' }); + return; + } + + switch (data.type) { + case 'subscribe': + this.subscribe(ws, data.table, data.filter); + break; + case 'unsubscribe': + this.unsubscribe(ws, data.table); + break; + default: + this.safeSend(ws, { error: 'Unknown message type' }); + break; + } + } catch { + this.safeSend(ws, { error: 'Invalid message format' }); + } + } + + handleClose(ws: ServerWebSocket): void { + realtimeLogger.info('Client disconnected'); + + const client = this.clients.get(ws); + if (client) { + for (const table of client.subscriptions.keys()) { + const subscribers = this.tableSubscribers.get(table); + subscribers?.delete(ws); + + if (subscribers && subscribers.size === 0) { + this.tableSubscribers.delete(table); + } + } + } + + this.clients.delete(ws); + } + + broadcast(table: string, event: RealtimeUpdatePayload['event'], data: unknown): void { + const subscribers = this.tableSubscribers.get(table); + if (!subscribers || subscribers.size === 0) { + return; + } + + const initialCount = subscribers.size; + + const payload: RealtimeUpdatePayload = { + type: 'update', + table, + event, + data, + timestamp: new Date().toISOString(), + }; + + const message = JSON.stringify(payload); + + for (const ws of [...subscribers]) { + const client = this.clients.get(ws); + const subscription = client?.subscriptions.get(table); + if (!this.matchesFilter(subscription?.filter, data)) { + continue; + } + + if (!this.safeSend(ws, message)) { + subscribers.delete(ws); + this.handleClose(ws); + } + } + + realtimeLogger.debug(`Broadcasted ${event} on ${table} to ${initialCount} clients`); + } + + private subscribe(ws: ServerWebSocket, table: string, filter?: Record): void { + const client = this.clients.get(ws); + if 
(!client) { + return; + } + + client.subscriptions.set(table, { table, filter }); + + if (!this.tableSubscribers.has(table)) { + this.tableSubscribers.set(table, new Set()); + } + + this.tableSubscribers.get(table)?.add(ws); + + this.safeSend(ws, { + type: 'subscribed', + table, + filter, + }); + + realtimeLogger.debug(`Client subscribed to ${table}`); + } + + private unsubscribe(ws: ServerWebSocket, table: string): void { + const client = this.clients.get(ws); + if (!client) { + return; + } + + client.subscriptions.delete(table); + const subscribers = this.tableSubscribers.get(table); + subscribers?.delete(ws); + + if (subscribers && subscribers.size === 0) { + this.tableSubscribers.delete(table); + } + + this.safeSend(ws, { + type: 'unsubscribed', + table, + }); + } + + private matchesFilter(filter: Record | undefined, payload: unknown): boolean { + if (!filter || Object.keys(filter).length === 0) { + return true; + } + + if (!payload || typeof payload !== 'object') { + return false; + } + + const data = payload as Record; + return Object.entries(filter).every(([key, value]) => data[key] === value); + } + + private safeSend(ws: ServerWebSocket, payload: object | string): boolean { + if (ws.readyState !== WebSocket.OPEN) { + return false; + } + + try { + ws.send(typeof payload === 'string' ? payload : JSON.stringify(payload)); + return true; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + realtimeLogger.warn(`WebSocket send failed: ${message}`); + return false; + } + } +} + +export const realtime = new RealtimeServer(); diff --git a/betterbase/templates/base/src/routes/index.ts b/betterbase/templates/base/src/routes/index.ts index 64a9e83..2fb73e6 100644 --- a/betterbase/templates/base/src/routes/index.ts +++ b/betterbase/templates/base/src/routes/index.ts @@ -2,6 +2,7 @@ import { Hono } from 'hono'; import { cors } from 'hono/cors'; import { logger } from 'hono/logger'; import { HTTPException } from 'hono/http-exception'; +import { env } from '../lib/env'; import { healthRoute } from './health'; import { usersRoute } from './users'; @@ -11,12 +12,12 @@ export function registerRoutes(app: Hono): void { app.onError((err, c) => { const isHttpError = err instanceof HTTPException; - const showDetailedError = process.env.NODE_ENV === 'development' || isHttpError; + const showDetailedError = env.NODE_ENV === 'development' || isHttpError; return c.json( { error: showDetailedError ? err.message : 'Internal Server Error', - stack: process.env.NODE_ENV === 'development' ? err.stack : undefined, + stack: env.NODE_ENV === 'development' ? err.stack : undefined, details: isHttpError ? (err as { cause?: unknown }).cause ?? null : null, }, isHttpError ? 
err.status : 500, diff --git a/betterbase/templates/base/src/routes/users.ts b/betterbase/templates/base/src/routes/users.ts index 29f17c6..7d9504c 100644 --- a/betterbase/templates/base/src/routes/users.ts +++ b/betterbase/templates/base/src/routes/users.ts @@ -1,6 +1,7 @@ +import { asc } from 'drizzle-orm'; import { Hono } from 'hono'; import { HTTPException } from 'hono/http-exception'; -import { z } from 'zod'; +import { z, ZodError } from 'zod'; import { db } from '../db'; import { users } from '../db/schema'; import { parseBody } from '../middleware/validation'; @@ -10,11 +11,69 @@ export const createUserSchema = z.object({ name: z.string().min(1), }); +const DEFAULT_LIMIT = 25; +const MAX_LIMIT = 100; +const DEFAULT_OFFSET = 0; + +const paginationSchema = z.object({ + limit: z.coerce.number().int().nonnegative().default(DEFAULT_LIMIT), + offset: z.coerce.number().int().nonnegative().default(DEFAULT_OFFSET), +}); + export const usersRoute = new Hono(); usersRoute.get('/', async (c) => { - const allUsers = await db.select().from(users); - return c.json({ users: allUsers }); + try { + const pagination = paginationSchema.parse({ + limit: c.req.query('limit'), + offset: c.req.query('offset'), + }); + + const limit = Math.min(pagination.limit, MAX_LIMIT); + const offset = pagination.offset; + + if (limit === 0) { + return c.json({ + users: [], + pagination: { + limit, + offset, + // No DB query is run for limit=0, so hasMore cannot be determined. 
+ hasMore: null, + }, + }); + } + + const rows = await db.select().from(users).orderBy(asc(users.id)).limit(limit + 1).offset(offset); + const hasMore = rows.length > limit; + const paginatedUsers = rows.slice(0, limit); + + return c.json({ + users: paginatedUsers, + pagination: { + limit, + offset, + hasMore, + }, + }); + } catch (error) { + if (error instanceof HTTPException) { + throw error; + } + + if (error instanceof ZodError) { + return c.json( + { + error: 'Invalid pagination query parameters', + details: error.issues, + }, + 400, + ); + } + + console.error('Failed to fetch users:', error); + throw error; + } }); usersRoute.post('/', async (c) => { diff --git a/betterbase/tsconfig.base.json b/betterbase/tsconfig.base.json index b1d6a90..e86a10c 100644 --- a/betterbase/tsconfig.base.json +++ b/betterbase/tsconfig.base.json @@ -8,6 +8,7 @@ "skipLibCheck": true, "resolveJsonModule": true, "isolatedModules": true, - "forceConsistentCasingInFileNames": true + "forceConsistentCasingInFileNames": true, + "declaration": true } }