diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..30caf39 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,16 @@ +name: CI + +on: + push: + branches: [main] + pull_request: + +jobs: + lint: + uses: listee-dev/listee-ci/.github/workflows/lint.yml@main + + typecheck: + uses: listee-dev/listee-ci/.github/workflows/typecheck.yml@main + + test: + uses: listee-dev/listee-ci/.github/workflows/test.yml@main diff --git a/AGENTS.md b/AGENTS.md index 8824873..1c6bae6 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -3,19 +3,26 @@ ## Project Structure & Module Organization This monorepo uses Bun workspaces. Each package lives in `packages/` with source under `src/`. Build outputs flow to `dist/` and must never be edited manually. Shared tooling lives at the root (`tsconfig.json`, `biome.json`, `vitest.config.ts`, `bun.lock`); review cross-package impact before changing these files. +## Dependency Management +- Use the root `package.json` `catalog` to pin shared dependency versions. Packages reference catalog entries with the `"catalog:"` protocol. +- Add new shared dependencies to the root catalog before consuming them in individual packages. This keeps versions centralized and avoids drift across workspaces. +- Always run `bun install` from the repository root so that catalog resolutions and the shared `bun.lock` stay in sync. +- When publishing npm packages, ensure you build or pack with Bun (`bun pm pack` / `bun publish`) so catalog references collapse to concrete semver ranges. + ## Build, Test, and Development Commands - `bun install` — Sync dependencies and respect the lockfile used in CI. - `bun run build` — Run the TypeScript project references build, emitting artifacts to every `dist/` folder. -- `bun run lint` — Execute Biome formatter and linter in a single pass. -- `bun run test` — Launch Vitest in the Node environment for the entire workspace. 
+- `bun run lint` — Execute Biome formatter and linter in a single pass (only `packages/*/src/**` is scanned via `files.includes`). +- `bun test` or `bun run test` — Execute Bun's built-in test runner across the workspace (see `packages/db/src/index.test.ts` for examples). - `bun run changeset` — Draft release notes and version bumps via Changesets. - `bun run clean` — Remove build artifacts and reinstall dependencies (does not delete untracked source files). ## Coding Style & Naming Conventions TypeScript runs with `strict` enabled; avoid implicit `any` and replace `as` casts with dedicated type guards or the `satisfies` operator where appropriate. Prefer `unknown` for external inputs. Use kebab-case for package folders, PascalCase for types and enums, and camelCase for variables and functions. Always commit the formatter output produced by `bun run lint`. +All source comments, test names, and test descriptions must be written in English. ## Testing Guidelines -Vitest is the test runner. Co-locate tests as `*.test.ts` files or inside `__tests__/`. Name suites with behavior-focused sentences so failures highlight intent. For new features, cover both success paths and the most representative error paths. Run `bun run test` (and `bun run build` when touching types) before opening a PR. +Use Bun's built-in test runner. Co-locate tests as `*.test.ts` files or inside `__tests__/`. Name suites with behavior-focused sentences so failures highlight intent. For new features, cover both success paths and the most representative error paths. Run `bun test` (and `bun run build` when touching types) before opening a PR. ## Commit & Pull Request Guidelines Write imperative commit summaries under 50 characters (e.g., `Add chat session schema`) and include context, impact, and test notes in the body when needed. PR descriptions must capture purpose, key changes, test evidence, linked issues, and screenshots or logs for user-facing updates. 
Attach the latest `.changeset/` entry whenever a release is required. @@ -23,3 +30,13 @@ Write imperative commit summaries under 50 characters (e.g., `Add chat session s ## Security & Release Management Never commit secrets; surface runtime configuration via factories that accept environment values. Version changes must follow SemVer, with breaking updates declared in Changesets. Verify releases by checking the generated changelog and confirming publication for each package on npm. Enable secret scanning and push protection in CI (e.g., gitleaks), and require npm 2FA + provenance for publishing. + +## Architecture Playbook +- Maintain a single dependency direction (`routes → queries → services → repositories`) so that upper layers stay ignorant of lower-level details. +- `routes` should delegate exclusively to `queries`, translate their results into HTTP responses, and decide status codes. Avoid placing business logic here. +- `queries` compose the necessary `services` and `repositories` per use case. Inject dependencies through factories so tests can swap in mocks easily. +- `services` may depend on `repositories`, but repositories must never depend on services. Extract complex domain logic into dedicated modules and keep the service layer thin. +- `repositories` encapsulate external SDK, SQL, or KV access and return plain or domain-specific types (`string`, `Date`, structured objects) to callers. +- Separate authentication and authorization concerns inside `packages/auth`. Place runtime-specific adapters under `authentication/` (`header.ts`, `supabase.ts`, shared utilities, and `errors.ts`) and domain policies under `authorization/` (e.g., `policies/chat.ts` exposing `canAccessChat`). Policies can declare repository interfaces and receive concrete implementations via dependency injection. 
+- Supabase JWT verification is handled by `createSupabaseAuthentication`, which fetches the JWKS from `.well-known/jwks.json`; only extend this provider via dedicated modules so JWKS caching and claim validation remain centralized. +- Process authenticated requests in the order `Route Handler → Authentication → Queries → Authorization → Services/Repositories`, passing the authenticated actor into queries before evaluating policies. diff --git a/README.md b/README.md index dec7a06..3da8ae4 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # listee-libs ## Overview -`listee-libs` is the public monorepo that hosts the Listee SDK packages. Each module is published under the `@listee/*` scope so downstream applications (API, web, CLI, MCP) can consume them independently. The initial release focuses on `@listee/types` and `@listee/db`, with additional packages (`auth`, `chat`, `ui`, `sdk`) following incrementally. +`listee-libs` is the public monorepo that hosts the Listee SDK packages. Each module is published under the `@listee/*` scope so downstream applications (API, web, CLI, MCP) can consume them independently. The initial release focuses on `@listee/db` (database access layer) and `@listee/auth` (token verification utilities), with `@listee/types` and other packages (`chat`, `ui`, `sdk`) landing incrementally. ## Repository Layout - `packages/` — Individual packages with their implementation in `src/` and compiled output in `dist/`. @@ -11,15 +11,51 @@ - `.github/workflows/` — CI pipelines based on `listee-dev/listee-ci@v1` workflows. ## Getting Started -1. Install Bun `1.2.19` (or later). We recommend pinning via `"packageManager": "bun@1.2.19"` in the root package.json for reproducibility. -2. Run `bun install` to sync dependencies. +1. Install Bun `1.2.22` (or later). We recommend pinning via `"packageManager": "bun@1.2.22"` in the root package.json for reproducibility. +2. 
Run `bun install` at the repository root (catalog-aware installation for every workspace). 3. Use `bun run lint`, `bun run build`, and `bun run test` to verify changes locally. 4. Initialize Changesets with `bun run changeset init` if you are bootstrapping a fresh clone. +## Packages + +### `@listee/db` +- Provides a thin Postgres + Drizzle ORM layer with connection caching for local development. +- Requires `POSTGRES_URL` to be defined before calling `createPostgresConnection`. +- Exposes helpers: + - `createPostgresConnection` — returns a cached `postgres` client (disabled in production); accepts optional overrides. + - `db` — shared `drizzle-orm` database instance backed by the cached connection. + - `createRlsClient`/`createDrizzle` — wrap transactions with Supabase-style RLS claims and role switching. +- Publishes generated types alongside compiled output (`sideEffects: false` for optimal tree-shaking). +- Ships with Bun-based unit tests (`packages/db/src/index.test.ts`) that mock `postgres`/`drizzle-orm`. Run `bun test` from the repo root to execute them. + +### `@listee/auth` +- Exposes reusable authentication providers under `packages/auth/src/authentication/`. +- `createHeaderAuthentication` performs lightweight header extraction suitable for development stubs. +- `createSupabaseAuthentication` validates Supabase-issued JWT access tokens against the project's JWKS (`/auth/v1/.well-known/jwks.json`), enforces issuer/audience/role constraints, and returns a typed `SupabaseToken` payload. +- Shared utilities (`shared.ts`, `errors.ts`) handle predictable error surfaces; tests live beside the implementation (`supabase.test.ts`) and exercise positive/negative paths. +- The package emits declarations from `src/` only; test files are excluded from `dist/` via `tsconfig.json`. + ## Contribution Notes - Follow the guidance in `AGENTS.md` for agent automation workflows and repository conventions. - Keep documentation and code comments in English. 
- Coordinate feature work through focused branches (`feature/...`, `chore/...`, etc.) and submit PRs with clear descriptions, linked issues, and test evidence. +## Architecture Guidelines + +### Responsibility Boundaries +- `routes` only depend on `queries`, translate the return values into HTTP responses, and decide status codes. +- `queries` orchestrate the necessary `services` and `repositories` for each use case and accept dependencies via factories so they remain easy to test. +- `services` may depend on `repositories` (never the other way around). When a service grows large, consider moving domain logic under a dedicated `domain/` module and keeping application services thin. +- `repositories` sit at the bottom layer and encapsulate external SDK calls, SQL, or KV access. They should return plain TypeScript/domain types (`string`, `Date`, structured objects) to upstream layers. + +### Dependency Flow +Keep a single direction: `routes → queries → (services → repositories)`. With the stack arranged this way you can reuse everything below `queries` across different runtimes (e.g., Cloudflare Workers) and mock each layer in isolation during tests. + +### Authentication vs Authorization +- Treat authentication (identifying who the caller is) and authorization (deciding what that caller may do) as separate concerns under the `auth` package. +- Place authentication adapters in `packages/auth/src/authentication/` and expose helpers such as `getAuthenticatedUser(request)` so each runtime can plug in its own token/session verification. +- Organize authorization policies under `packages/auth/src/authorization/` with domain-specific modules (e.g., `policies/chat.ts` providing `canAccessChat`). Policies may declare repository interfaces that the application injects, keeping policy evaluation independent from data fetching details. 
+- The recommended execution order for an authenticated endpoint is `Route Handler → Authentication → Queries → Authorization → Services/Repositories`. Queries receive the authenticated actor (for example, via context) and call the relevant authorization policy before touching domain services. + ## Release Process Changesets drive versioning and publishing. Merging to `main` triggers the shared CI pipelines, including the release workflow that prepares npm publications. Confirm published versions for `@listee/types` and `@listee/db` before announcing availability to downstream projects. diff --git a/biome.json b/biome.json new file mode 100644 index 0000000..c91b15f --- /dev/null +++ b/biome.json @@ -0,0 +1,28 @@ +{ + "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json", + "formatter": { + "enabled": true, + "indentStyle": "space" + }, + "assist": { + "actions": { + "source": { + "organizeImports": "on" + } + } + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true + } + }, + "javascript": { + "formatter": { + "quoteStyle": "double" + } + }, + "files": { + "includes": ["packages/*/src/**"] + } +} diff --git a/bun.lock b/bun.lock new file mode 100644 index 0000000..cb3347b --- /dev/null +++ b/bun.lock @@ -0,0 +1,98 @@ +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "listee-libs", + "devDependencies": { + "@biomejs/biome": "2.2.4", + "@types/bun": "1.2.22", + }, + }, + "packages/api": { + "name": "@listee/api", + "version": "0.0.0", + "dependencies": { + "@listee/auth": "workspace:*", + "@listee/db": "workspace:*", + "@listee/types": "workspace:*", + "hono": "catalog:", + }, + }, + "packages/auth": { + "name": "@listee/auth", + "version": "0.0.0", + "dependencies": { + "@listee/types": "workspace:^", + "jose": "catalog:", + }, + }, + "packages/db": { + "name": "@listee/db", + "version": "0.0.0", + "dependencies": { + "drizzle-orm": "catalog:", + "postgres": "catalog:", + }, + }, + "packages/types": { + "name": "@listee/types", + 
"version": "0.0.0", + "dependencies": { + "@listee/db": "workspace:^", + }, + }, + }, + "catalog": { + "drizzle-orm": "^0.44.5", + "hono": "^4.4.4", + "jose": "^5.2.3", + "postgres": "^3.4.7", + }, + "packages": { + "@biomejs/biome": ["@biomejs/biome@2.2.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.2.4", "@biomejs/cli-darwin-x64": "2.2.4", "@biomejs/cli-linux-arm64": "2.2.4", "@biomejs/cli-linux-arm64-musl": "2.2.4", "@biomejs/cli-linux-x64": "2.2.4", "@biomejs/cli-linux-x64-musl": "2.2.4", "@biomejs/cli-win32-arm64": "2.2.4", "@biomejs/cli-win32-x64": "2.2.4" }, "bin": { "biome": "bin/biome" } }, "sha512-TBHU5bUy/Ok6m8c0y3pZiuO/BZoY/OcGxoLlrfQof5s8ISVwbVBdFINPQZyFfKwil8XibYWb7JMwnT8wT4WVPg=="], + + "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.2.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-RJe2uiyaloN4hne4d2+qVj3d3gFJFbmrr5PYtkkjei1O9c+BjGXgpUPVbi8Pl8syumhzJjFsSIYkcLt2VlVLMA=="], + + "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.2.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-cFsdB4ePanVWfTnPVaUX+yr8qV8ifxjBKMkZwN7gKb20qXPxd/PmwqUH8mY5wnM9+U0QwM76CxFyBRJhC9tQwg=="], + + "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-M/Iz48p4NAzMXOuH+tsn5BvG/Jb07KOMTdSVwJpicmhN309BeEyRyQX+n1XDF0JVSlu28+hiTQ2L4rZPvu7nMw=="], + + "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-7TNPkMQEWfjvJDaZRSkDCPT/2r5ESFPKx+TEev+I2BXDGIjfCZk2+b88FOhnJNHtksbOZv8ZWnxrA5gyTYhSsQ=="], + + "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-orr3nnf2Dpb2ssl6aihQtvcKtLySLta4E2UcXdp7+RTa7mfJjBgIsbS0B9GC8gVu0hjOu021aU8b3/I1tn+pVQ=="], + + "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-m41nFDS0ksXK2gwXL6W6yZTYPMH0LughqbsxInSKetoH6morVj43szqKx79Iudkp8WRT5SxSh7qVb8KCUiewGg=="], + + 
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.2.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-NXnfTeKHDFUWfxAefa57DiGmu9VyKi0cDqFpdI+1hJWQjGJhJutHPX0b5m+eXvTKOaf+brU+P0JrQAZMb5yYaQ=="], + + "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.2.4", "", { "os": "win32", "cpu": "x64" }, "sha512-3Y4V4zVRarVh/B/eSHczR4LYoSVyv3Dfuvm3cWs5w/HScccS0+Wt/lHOcDTRYeHjQmMYVC3rIRWqyN2EI52+zg=="], + + "@listee/api": ["@listee/api@workspace:packages/api"], + + "@listee/auth": ["@listee/auth@workspace:packages/auth"], + + "@listee/db": ["@listee/db@workspace:packages/db"], + + "@listee/types": ["@listee/types@workspace:packages/types"], + + "@types/bun": ["@types/bun@1.2.22", "", { "dependencies": { "bun-types": "1.2.22" } }, "sha512-5A/KrKos2ZcN0c6ljRSOa1fYIyCKhZfIVYeuyb4snnvomnpFqC0tTsEkdqNxbAgExV384OETQ//WAjl3XbYqQA=="], + + "@types/node": ["@types/node@24.5.2", "", { "dependencies": { "undici-types": "~7.12.0" } }, "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ=="], + + "@types/react": ["@types/react@19.1.13", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-hHkbU/eoO3EG5/MZkuFSKmYqPbSVk5byPFa3e7y/8TybHiLMACgI8seVYlicwk7H5K/rI2px9xrQp/C+AUDTiQ=="], + + "bun-types": ["bun-types@1.2.22", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-hwaAu8tct/Zn6Zft4U9BsZcXkYomzpHJX28ofvx7k0Zz2HNz54n1n+tDgxoWFGB4PcFvJXJQloPhaV2eP3Q6EA=="], + + "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], + + "drizzle-orm": ["drizzle-orm@0.44.5", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", 
"@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-jBe37K7d8ZSKptdKfakQFdeljtu3P2Cbo7tJoJSVZADzIKOBo9IAJPOmMsH2bZl90bZgh8FQlD8BjxXA/zuBkQ=="], + + "hono": ["hono@4.9.8", "", {}, "sha512-JW8Bb4RFWD9iOKxg5PbUarBYGM99IcxFl2FPBo2gSJO11jjUDqlP1Bmfyqt8Z/dGhIQ63PMA9LdcLefXyIasyg=="], + + "jose": ["jose@5.10.0", "", {}, "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg=="], + + "postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="], + + "undici-types": ["undici-types@7.12.0", "", {}, "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ=="], + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..54b00a5 --- /dev/null +++ b/package.json @@ -0,0 +1,24 @@ +{ + "name": "listee-libs", + "version": "0.0.0", + "private": true, + "packageManager": "bun@1.2.22", + "catalog": { + "drizzle-orm": "^0.44.5", + "hono": "^4.4.4", + "postgres": "^3.4.7", + "jose": "^5.2.3" + }, + "workspaces": [ + "packages/*" + ], + 
"scripts": { + "lint": "biome check", + "lint:fix": "biome check --write", + "test": "bun test" + }, + "devDependencies": { + "@biomejs/biome": "2.2.4", + "@types/bun": "1.2.22" + } +} diff --git a/packages/api/package.json b/packages/api/package.json new file mode 100644 index 0000000..e78a2c1 --- /dev/null +++ b/packages/api/package.json @@ -0,0 +1,22 @@ +{ + "name": "@listee/api", + "version": "0.0.0", + "type": "module", + "main": "./dist/index.js", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "sideEffects": false, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --project tsconfig.build.json", + "clean": "rm -rf dist" + }, + "dependencies": { + "@listee/auth": "workspace:*", + "@listee/db": "workspace:*", + "@listee/types": "workspace:*", + "hono": "catalog:" + } +} diff --git a/packages/api/src/app.test.ts b/packages/api/src/app.test.ts new file mode 100644 index 0000000..9559845 --- /dev/null +++ b/packages/api/src/app.test.ts @@ -0,0 +1,277 @@ +import { describe, expect, test } from "bun:test"; +import { createHeaderAuthentication } from "@listee/auth"; +import type { + Category, + CategoryQueries, + ListCategoriesResult, + Task, + TaskQueries, +} from "@listee/types"; +import { createApp } from "./app"; + +function createRequest(path: string, init: RequestInit = {}): Request { + return new Request(`http://localhost${path}`, init); +} + +describe("health routes", () => { + test("returns ok status", async () => { + const app = createApp(); + const response = await app.fetch(createRequest("/healthz")); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(body.status).toBe("ok"); + }); + + test("returns unknown when database checker is missing", async () => { + const app = createApp(); + const response = await app.fetch(createRequest("/healthz/database")); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(body.status).toBe("unknown"); + }); + + test("returns ok 
when database checker succeeds", async () => { + const app = createApp({ + databaseHealth: async () => ({ ok: true }), + }); + + const response = await app.fetch(createRequest("/healthz/database")); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(body.status).toBe("ok"); + }); + + test("returns error when database checker fails", async () => { + const app = createApp({ + databaseHealth: async () => ({ ok: false, error: "connection failed" }), + }); + + const response = await app.fetch(createRequest("/healthz/database")); + const body = await response.json(); + + expect(response.status).toBe(503); + expect(body.status).toBe("error"); + expect(body.error).toBe("connection failed"); + }); +}); + +describe("category routes", () => { + const { categoryQueries, categories } = createCategoryQueries(); + + test("lists categories for a user", async () => { + const authentication = createHeaderAuthentication(); + const app = createApp({ categoryQueries, authentication }); + + const response = await app.fetch( + createRequest("/users/user-1/categories?limit=1", { + headers: { Authorization: "Bearer user-1" }, + }), + ); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(Array.isArray(body.data)).toBe(true); + expect(body.data).toHaveLength(1); + expect(body.meta.hasMore).toBe(true); + expect(body.meta.nextCursor).toBe(categories[0].createdAt.toISOString()); + }); + + test("rejects invalid limit", async () => { + const authentication = createHeaderAuthentication(); + const app = createApp({ categoryQueries, authentication }); + + const response = await app.fetch( + createRequest("/users/user-1/categories?limit=-1", { + headers: { Authorization: "Bearer user-1" }, + }), + ); + const body = await response.json(); + + expect(response.status).toBe(400); + expect(body.error).toBe("Invalid limit parameter"); + }); + + test("finds category by id", async () => { + const authentication = 
createHeaderAuthentication(); + const app = createApp({ categoryQueries, authentication }); + const target = categories[0]; + + const response = await app.fetch( + createRequest(`/categories/${target.id}`, { + headers: { Authorization: `Bearer ${target.createdBy}` }, + }), + ); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(body.data.id).toBe(target.id); + }); + + test("returns 404 when category is missing", async () => { + const authentication = createHeaderAuthentication(); + const app = createApp({ categoryQueries, authentication }); + + const response = await app.fetch( + createRequest("/categories/unknown", { + headers: { Authorization: "Bearer user-1" }, + }), + ); + expect(response.status).toBe(404); + }); +}); + +describe("task routes", () => { + const { taskQueries, tasks } = createTaskQueries(); + + test("lists tasks for a category", async () => { + const authentication = createHeaderAuthentication(); + const app = createApp({ taskQueries, authentication }); + const categoryId = tasks[0].categoryId; + + const response = await app.fetch( + createRequest(`/categories/${categoryId}/tasks`, { + headers: { Authorization: `Bearer ${tasks[0].createdBy}` }, + }), + ); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(Array.isArray(body.data)).toBe(true); + expect(body.data).toHaveLength(1); + }); + + test("finds task by id", async () => { + const authentication = createHeaderAuthentication(); + const app = createApp({ taskQueries, authentication }); + const target = tasks[0]; + + const response = await app.fetch( + createRequest(`/tasks/${target.id}`, { + headers: { Authorization: `Bearer ${target.createdBy}` }, + }), + ); + const body = await response.json(); + + expect(response.status).toBe(200); + expect(body.data.id).toBe(target.id); + }); + + test("returns 404 when task is missing", async () => { + const authentication = createHeaderAuthentication(); + const app = createApp({ 
taskQueries, authentication }); + + const response = await app.fetch( + createRequest("/tasks/unknown", { + headers: { Authorization: "Bearer user-1" }, + }), + ); + expect(response.status).toBe(404); + }); +}); + +function createCategoryQueries(): { + readonly categoryQueries: CategoryQueries; + readonly categories: readonly Category[]; +} { + const categories: Category[] = [ + createCategory({ + id: "category-1", + createdAt: new Date("2024-01-03T00:00:00Z"), + }), + createCategory({ + id: "category-2", + createdAt: new Date("2024-01-02T00:00:00Z"), + }), + createCategory({ + id: "category-3", + createdAt: new Date("2024-01-01T00:00:00Z"), + }), + ]; + + const categoryQueries: CategoryQueries = { + listByUserId: async ({ userId, limit = 20 }) => { + const items = categories + .filter((category) => category.createdBy === userId) + .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime()); + + const selected = items.slice(0, limit); + const hasMore = items.length > selected.length; + const nextCursor = hasMore + ? (selected[selected.length - 1]?.createdAt.toISOString() ?? null) + : null; + + return { + items: selected, + nextCursor, + hasMore, + } satisfies ListCategoriesResult; + }, + findById: async ({ categoryId }) => { + const category = categories.find((item) => item.id === categoryId); + return category ?? null; + }, + }; + + return { categoryQueries, categories }; +} + +function createTaskQueries(): { + readonly taskQueries: TaskQueries; + readonly tasks: readonly Task[]; +} { + const tasks: Task[] = [ + createTask({ id: "task-1", categoryId: "category-1" }), + createTask({ id: "task-2", categoryId: "category-2" }), + ]; + + const taskQueries: TaskQueries = { + listByCategory: async ({ categoryId }) => + tasks.filter((task) => task.categoryId === categoryId), + findById: async ({ taskId }) => { + const task = tasks.find((item) => item.id === taskId); + return task ?? 
null; + }, + }; + + return { taskQueries, tasks }; +} + +interface CategoryOptions { + readonly id: string; + readonly createdAt: Date; +} + +function createCategory(options: CategoryOptions): Category { + return { + id: options.id, + name: `Category ${options.id}`, + kind: "user", + createdBy: "user-1", + updatedBy: "user-1", + createdAt: options.createdAt, + updatedAt: options.createdAt, + }; +} + +interface TaskOptions { + readonly id: string; + readonly categoryId: string; +} + +function createTask(options: TaskOptions): Task { + const timestamp = new Date("2024-01-01T00:00:00Z"); + return { + id: options.id, + name: `Task ${options.id}`, + description: null, + isChecked: false, + categoryId: options.categoryId, + createdBy: "user-1", + updatedBy: "user-1", + createdAt: timestamp, + updatedAt: timestamp, + }; +} diff --git a/packages/api/src/app.ts b/packages/api/src/app.ts new file mode 100644 index 0000000..f733cf1 --- /dev/null +++ b/packages/api/src/app.ts @@ -0,0 +1,35 @@ +import type { AppDependencies } from "@listee/types"; +import { Hono } from "hono"; +import { registerCategoryRoutes } from "./routes/categories"; +import { registerHealthRoutes } from "./routes/health"; +import { registerTaskRoutes } from "./routes/tasks"; + +export function createApp(dependencies: AppDependencies = {}): Hono { + const app = new Hono(); + + registerHealthRoutes(app, { databaseHealth: dependencies.databaseHealth }); + registerCategoryRoutes(app, { + queries: dependencies.categoryQueries, + authentication: dependencies.authentication, + }); + registerTaskRoutes(app, { + queries: dependencies.taskQueries, + authentication: dependencies.authentication, + }); + + return app; +} + +type AppFetch = ReturnType["fetch"]; + +export function createFetchHandler( + dependencies: AppDependencies = {}, +): ( + request: Request, + env?: Parameters[1], + executionContext?: Parameters[2], +) => Promise { + const app = createApp(dependencies); + return async (request, env, 
executionContext) => + await app.fetch(request, env, executionContext); +} diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts new file mode 100644 index 0000000..5dbaae6 --- /dev/null +++ b/packages/api/src/index.ts @@ -0,0 +1,23 @@ +export type { + AppDependencies, + CategoryQueries, + DatabaseHealthChecker, + DatabaseHealthStatus, + FindCategoryParams, + FindTaskParams, + ListCategoriesParams, + ListCategoriesResult, + ListTasksParams, + TaskQueries, +} from "@listee/types"; +export { createApp, createFetchHandler } from "./app.js"; +export { createDatabaseHealthChecker } from "./infrastructure/database-health.js"; +export { createCategoryQueries } from "./queries/category-queries.js"; +export { createTaskQueries } from "./queries/task-queries.js"; +export { createCategoryRepository } from "./repositories/category-repository.js"; +export { createTaskRepository } from "./repositories/task-repository.js"; +export { registerCategoryRoutes } from "./routes/categories.js"; +export { registerHealthRoutes } from "./routes/health.js"; +export { registerTaskRoutes } from "./routes/tasks.js"; +export { createCategoryService } from "./services/category-service.js"; +export { createTaskService } from "./services/task-service.js"; diff --git a/packages/api/src/infrastructure/database-health.ts b/packages/api/src/infrastructure/database-health.ts new file mode 100644 index 0000000..017869c --- /dev/null +++ b/packages/api/src/infrastructure/database-health.ts @@ -0,0 +1,20 @@ +import type { Database } from "@listee/db"; +import { sql } from "@listee/db"; +import type { DatabaseHealthChecker } from "@listee/types"; +import { toErrorMessage } from "../utils/error"; + +export function createDatabaseHealthChecker( + db: Database, +): DatabaseHealthChecker { + return async () => { + try { + await db.execute(sql`select 1`); + return { ok: true }; + } catch (error) { + return { + ok: false, + error: toErrorMessage(error), + }; + } + }; +} diff --git 
a/packages/api/src/queries/category-queries.ts b/packages/api/src/queries/category-queries.ts new file mode 100644 index 0000000..2bed5e9 --- /dev/null +++ b/packages/api/src/queries/category-queries.ts @@ -0,0 +1,32 @@ +import type { + CategoryQueries, + CategoryQueriesDependencies, + FindCategoryParams, + ListCategoriesParams, +} from "@listee/types"; + +export function createCategoryQueries( + dependencies: CategoryQueriesDependencies, +): CategoryQueries { + async function listByUserId(params: ListCategoriesParams) { + const limit = params.limit ?? 20; + + return dependencies.service.listByUserId({ + userId: params.userId, + limit, + cursor: params.cursor, + }); + } + + async function findById(params: FindCategoryParams) { + return dependencies.service.findById({ + categoryId: params.categoryId, + userId: params.userId, + }); + } + + return { + listByUserId, + findById, + }; +} diff --git a/packages/api/src/queries/task-queries.ts b/packages/api/src/queries/task-queries.ts new file mode 100644 index 0000000..fc09ee7 --- /dev/null +++ b/packages/api/src/queries/task-queries.ts @@ -0,0 +1,29 @@ +import type { + FindTaskParams, + ListTasksParams, + TaskQueries, + TaskQueriesDependencies, +} from "@listee/types"; + +export function createTaskQueries( + dependencies: TaskQueriesDependencies, +): TaskQueries { + async function listByCategory(params: ListTasksParams) { + return dependencies.service.listByCategory({ + categoryId: params.categoryId, + userId: params.userId, + }); + } + + async function findById(params: FindTaskParams) { + return dependencies.service.findById({ + taskId: params.taskId, + userId: params.userId, + }); + } + + return { + listByCategory, + findById, + }; +} diff --git a/packages/api/src/repositories/category-repository.ts b/packages/api/src/repositories/category-repository.ts new file mode 100644 index 0000000..1bb714c --- /dev/null +++ b/packages/api/src/repositories/category-repository.ts @@ -0,0 +1,136 @@ +import { Buffer } from 
"node:buffer"; +import type { Database } from "@listee/db"; +import { categories } from "@listee/db"; +import type { + Category, + CategoryRepository, + FindCategoryRepositoryParams, + ListCategoriesRepositoryParams, + PaginatedResult, +} from "@listee/types"; +import { and, desc, eq, lt, or } from "drizzle-orm"; + +interface CategoryCursorPayload { + readonly createdAt: string; + readonly id: string; +} + +interface CategoryCursor { + readonly createdAt: Date; + readonly id: string; +} + +function parseCursor(value: string | null | undefined): CategoryCursor | null { + if (value === undefined || value === null || value.length === 0) { + return null; + } + + try { + const decoded = Buffer.from(value, "base64url").toString("utf8"); + const payload = JSON.parse(decoded) as CategoryCursorPayload; + if ( + typeof payload.createdAt !== "string" || + typeof payload.id !== "string" + ) { + return null; + } + + const createdAt = new Date(payload.createdAt); + if (Number.isNaN(createdAt.getTime())) { + return null; + } + + return { createdAt, id: payload.id }; + } catch { + return null; + } +} + +export function createCategoryRepository(db: Database): CategoryRepository { + async function listByUserId( + params: ListCategoriesRepositoryParams, + ): Promise> { + const baseCondition = eq(categories.createdBy, params.userId); + const cursor = parseCursor(params.cursor); + const condition = + cursor === null + ? 
baseCondition + : and( + baseCondition, + or( + lt(categories.createdAt, cursor.createdAt), + and( + eq(categories.createdAt, cursor.createdAt), + lt(categories.id, cursor.id), + ), + ), + ); + + const rawLimit = Math.trunc(params.limit); + if (!Number.isFinite(rawLimit) || rawLimit <= 0) { + return { + items: [], + nextCursor: null, + hasMore: false, + }; + } + + const limit = rawLimit; + const rows = await db + .select() + .from(categories) + .where(condition) + .orderBy(desc(categories.createdAt), desc(categories.id)) + .limit(limit + 1); + + const hasMore = rows.length > limit; + const items = hasMore ? rows.slice(0, limit) : rows; + let nextCursor: string | null = null; + + if (hasMore) { + const lastItem = items[items.length - 1]; + if (lastItem !== undefined) { + const payload: CategoryCursorPayload = { + createdAt: lastItem.createdAt.toISOString(), + id: lastItem.id, + }; + + nextCursor = Buffer.from(JSON.stringify(payload), "utf8").toString( + "base64url", + ); + } + } + + return { + items, + nextCursor, + hasMore, + }; + } + + async function findById( + params: FindCategoryRepositoryParams, + ): Promise { + const rows = await db + .select() + .from(categories) + .where(eq(categories.id, params.categoryId)) + .limit(1); + + const category = rows[0]; + if (category === undefined) { + return null; + } + + if (params.userId !== undefined && category.createdBy !== params.userId) { + return null; + } + + return category; + } + + return { + listByUserId, + findById, + }; +} diff --git a/packages/api/src/repositories/task-repository.ts b/packages/api/src/repositories/task-repository.ts new file mode 100644 index 0000000..50bf10c --- /dev/null +++ b/packages/api/src/repositories/task-repository.ts @@ -0,0 +1,52 @@ +import type { Database } from "@listee/db"; +import { tasks } from "@listee/db"; +import type { + FindTaskRepositoryParams, + ListTasksRepositoryParams, + Task, + TaskRepository, +} from "@listee/types"; +import { eq } from "drizzle-orm"; + +export 
function createTaskRepository(db: Database): TaskRepository { + async function listByCategory( + params: ListTasksRepositoryParams, + ): Promise { + const rows = await db + .select() + .from(tasks) + .where(eq(tasks.categoryId, params.categoryId)); + + if (params.userId === undefined) { + return rows; + } + + return rows.filter((task) => task.createdBy === params.userId); + } + + async function findById( + params: FindTaskRepositoryParams, + ): Promise { + const rows = await db + .select() + .from(tasks) + .where(eq(tasks.id, params.taskId)) + .limit(1); + + const task = rows[0]; + if (task === undefined) { + return null; + } + + if (params.userId !== undefined && task.createdBy !== params.userId) { + return null; + } + + return task; + } + + return { + listByCategory, + findById, + }; +} diff --git a/packages/api/src/routes/auth-utils.ts b/packages/api/src/routes/auth-utils.ts new file mode 100644 index 0000000..8136fc0 --- /dev/null +++ b/packages/api/src/routes/auth-utils.ts @@ -0,0 +1,20 @@ +import { AuthenticationError } from "@listee/auth"; +import type { + AuthenticationProvider, + AuthenticationResult, +} from "@listee/types"; + +export async function tryAuthenticate( + provider: AuthenticationProvider, + request: Request, +): Promise { + try { + return await provider.authenticate({ request }); + } catch (error) { + if (error instanceof AuthenticationError) { + return null; + } + + throw error; + } +} diff --git a/packages/api/src/routes/categories.ts b/packages/api/src/routes/categories.ts new file mode 100644 index 0000000..ce96bc2 --- /dev/null +++ b/packages/api/src/routes/categories.ts @@ -0,0 +1,124 @@ +import type { RegisterCategoryRoutesOptions } from "@listee/types"; +import type { Hono } from "hono"; +import { tryAuthenticate } from "./auth-utils.js"; + +interface CategoryResponse { + readonly id: string; + readonly name: string; + readonly kind: string; + readonly createdBy: string; + readonly updatedBy: string; + readonly createdAt: string; + 
readonly updatedAt: string; +} + +interface ListCategoriesResponse { + readonly data: readonly CategoryResponse[]; + readonly meta: { + readonly nextCursor: string | null; + readonly hasMore: boolean; + }; +} + +function toCategoryResponse(category: { + readonly id: string; + readonly name: string; + readonly kind: string; + readonly createdBy: string; + readonly updatedBy: string; + readonly createdAt: Date; + readonly updatedAt: Date; +}): CategoryResponse { + return { + id: category.id, + name: category.name, + kind: category.kind, + createdBy: category.createdBy, + updatedBy: category.updatedBy, + createdAt: category.createdAt.toISOString(), + updatedAt: category.updatedAt.toISOString(), + }; +} + +function parsePositiveInteger(value: string | undefined): number | undefined { + if (value === undefined) { + return undefined; + } + + if (!/^[1-9]\d*$/.test(value)) { + return undefined; + } + + const parsed = Number.parseInt(value, 10); + if (parsed <= 0) { + return undefined; + } + + return parsed; +} + +export function registerCategoryRoutes( + app: Hono, + options: RegisterCategoryRoutesOptions = {}, +): void { + const queries = options.queries; + const authentication = options.authentication; + + if (queries === undefined || authentication === undefined) { + return; + } + + app.get("/users/:userId/categories", async (context) => { + const authResult = await tryAuthenticate(authentication, context.req.raw); + if (authResult === null) { + return context.json({ error: "Unauthorized" }, 401); + } + const userId = context.req.param("userId"); + + if (authResult.user.id !== userId) { + return context.json({ error: "Forbidden" }, 403); + } + + const limitParam = context.req.query("limit"); + const cursor = context.req.query("cursor") ?? 
null; + const limit = parsePositiveInteger(limitParam); + + if (limitParam !== undefined && limit === undefined) { + return context.json({ error: "Invalid limit parameter" }, 400); + } + + const result = await queries.listByUserId({ + userId, + limit, + cursor, + }); + + const response: ListCategoriesResponse = { + data: result.items.map((category) => toCategoryResponse(category)), + meta: { + nextCursor: result.nextCursor, + hasMore: result.hasMore, + }, + }; + + return context.json(response); + }); + + app.get("/categories/:categoryId", async (context) => { + const authResult = await tryAuthenticate(authentication, context.req.raw); + if (authResult === null) { + return context.json({ error: "Unauthorized" }, 401); + } + const categoryId = context.req.param("categoryId"); + + const category = await queries.findById({ + categoryId, + userId: authResult.user.id, + }); + if (category === null) { + return context.json({ error: "Not Found" }, 404); + } + + return context.json({ data: toCategoryResponse(category) }); + }); +} diff --git a/packages/api/src/routes/health.ts b/packages/api/src/routes/health.ts new file mode 100644 index 0000000..355bf81 --- /dev/null +++ b/packages/api/src/routes/health.ts @@ -0,0 +1,48 @@ +import type { RegisterHealthRoutesOptions } from "@listee/types"; +import type { Hono } from "hono"; +import { toErrorMessage } from "../utils/error.js"; + +export function registerHealthRoutes( + app: Hono, + options: RegisterHealthRoutesOptions = {}, +): void { + app.get("/healthz", (context) => { + return context.json({ status: "ok" }); + }); + + app.get("/healthz/database", async (context) => { + const checker = options.databaseHealth; + if (!checker) { + return context.json({ status: "unknown" }, 200); + } + + try { + const result = await checker(); + if (result.ok) { + return context.json({ status: "ok" }); + } + + return context.json( + { + status: "error", + error: result.error ?? 
"Database check failed", + }, + 503, + ); + } catch (error) { + console.error("Database health check failed", error); + const isProduction = process.env.NODE_ENV === "production"; + const errorMessage = isProduction + ? "Internal server error" + : toErrorMessage(error); + + return context.json( + { + status: "error", + error: errorMessage, + }, + 500, + ); + } + }); +} diff --git a/packages/api/src/routes/tasks.ts b/packages/api/src/routes/tasks.ts new file mode 100644 index 0000000..66efbfd --- /dev/null +++ b/packages/api/src/routes/tasks.ts @@ -0,0 +1,85 @@ +import type { RegisterTaskRoutesOptions } from "@listee/types"; +import type { Hono } from "hono"; +import { tryAuthenticate } from "./auth-utils.js"; + +interface TaskResponse { + readonly id: string; + readonly name: string; + readonly description: string | null; + readonly isChecked: boolean; + readonly categoryId: string; + readonly createdBy: string; + readonly updatedBy: string; + readonly createdAt: string; + readonly updatedAt: string; +} + +function toTaskResponse(task: { + readonly id: string; + readonly name: string; + readonly description: string | null; + readonly isChecked: boolean; + readonly categoryId: string; + readonly createdBy: string; + readonly updatedBy: string; + readonly createdAt: Date; + readonly updatedAt: Date; +}): TaskResponse { + return { + id: task.id, + name: task.name, + description: task.description, + isChecked: task.isChecked, + categoryId: task.categoryId, + createdBy: task.createdBy, + updatedBy: task.updatedBy, + createdAt: task.createdAt.toISOString(), + updatedAt: task.updatedAt.toISOString(), + }; +} + +export function registerTaskRoutes( + app: Hono, + options: RegisterTaskRoutesOptions = {}, +): void { + const queries = options.queries; + const authentication = options.authentication; + + if (queries === undefined || authentication === undefined) { + return; + } + + app.get("/categories/:categoryId/tasks", async (context) => { + const authResult = await 
tryAuthenticate(authentication, context.req.raw); + if (authResult === null) { + return context.json({ error: "Unauthorized" }, 401); + } + + const categoryId = context.req.param("categoryId"); + + const tasks = await queries.listByCategory({ + categoryId, + userId: authResult.user.id, + }); + return context.json({ data: tasks.map((task) => toTaskResponse(task)) }); + }); + + app.get("/tasks/:taskId", async (context) => { + const authResult = await tryAuthenticate(authentication, context.req.raw); + if (authResult === null) { + return context.json({ error: "Unauthorized" }, 401); + } + + const taskId = context.req.param("taskId"); + + const task = await queries.findById({ + taskId, + userId: authResult.user.id, + }); + if (task === null) { + return context.json({ error: "Not Found" }, 404); + } + + return context.json({ data: toTaskResponse(task) }); + }); +} diff --git a/packages/api/src/services/category-service.ts b/packages/api/src/services/category-service.ts new file mode 100644 index 0000000..ed5d731 --- /dev/null +++ b/packages/api/src/services/category-service.ts @@ -0,0 +1,29 @@ +import type { + Category, + CategoryService, + CategoryServiceDependencies, + FindCategoryRepositoryParams, + ListCategoriesRepositoryParams, + PaginatedResult, +} from "@listee/types"; + +export function createCategoryService( + dependencies: CategoryServiceDependencies, +): CategoryService { + async function listByUserId( + params: ListCategoriesRepositoryParams, + ): Promise> { + return dependencies.repository.listByUserId(params); + } + + async function findById( + params: FindCategoryRepositoryParams, + ): Promise { + return dependencies.repository.findById(params); + } + + return { + listByUserId, + findById, + }; +} diff --git a/packages/api/src/services/task-service.ts b/packages/api/src/services/task-service.ts new file mode 100644 index 0000000..da663cc --- /dev/null +++ b/packages/api/src/services/task-service.ts @@ -0,0 +1,28 @@ +import type { + 
FindTaskRepositoryParams, + ListTasksRepositoryParams, + Task, + TaskService, + TaskServiceDependencies, +} from "@listee/types"; + +export function createTaskService( + dependencies: TaskServiceDependencies, +): TaskService { + async function listByCategory( + params: ListTasksRepositoryParams, + ): Promise { + return dependencies.repository.listByCategory(params); + } + + async function findById( + params: FindTaskRepositoryParams, + ): Promise { + return dependencies.repository.findById(params); + } + + return { + listByCategory, + findById, + }; +} diff --git a/packages/api/src/utils/error.ts b/packages/api/src/utils/error.ts new file mode 100644 index 0000000..903ea5c --- /dev/null +++ b/packages/api/src/utils/error.ts @@ -0,0 +1,21 @@ +export function toErrorMessage(value: unknown): string { + if (value instanceof Error && typeof value.message === "string") { + return value.message; + } + + if (typeof value === "string") { + return value; + } + + try { + const serialized = JSON.stringify(value); + + if (typeof serialized === "string") { + return serialized; + } + + return "Unknown error"; + } catch { + return "Unknown error"; + } +} diff --git a/packages/api/tsconfig.build.json b/packages/api/tsconfig.build.json new file mode 100644 index 0000000..f1d7e20 --- /dev/null +++ b/packages/api/tsconfig.build.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "src/**/*.test.ts"] +} diff --git a/packages/api/tsconfig.json b/packages/api/tsconfig.json new file mode 100644 index 0000000..b66f213 --- /dev/null +++ b/packages/api/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "outDir": "dist", + "rootDirs": ["src", "../db/src", "../auth/src", "../types/src"], + "verbatimModuleSyntax": true, + "types": ["@types/bun"] + }, + "include": ["src/**/*.ts", "src/**/*.d.ts"], + "exclude": ["dist"] +} diff --git a/packages/auth/package.json 
b/packages/auth/package.json new file mode 100644 index 0000000..6579a2f --- /dev/null +++ b/packages/auth/package.json @@ -0,0 +1,20 @@ +{ + "name": "@listee/auth", + "version": "0.0.0", + "type": "module", + "main": "./dist/index.js", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "sideEffects": false, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --project tsconfig.build.json", + "clean": "rm -rf dist" + }, + "dependencies": { + "@listee/types": "workspace:^", + "jose": "catalog:" + } +} diff --git a/packages/auth/src/authentication/errors.ts b/packages/auth/src/authentication/errors.ts new file mode 100644 index 0000000..99502f4 --- /dev/null +++ b/packages/auth/src/authentication/errors.ts @@ -0,0 +1,6 @@ +export class AuthenticationError extends Error { + constructor(message: string) { + super(message); + this.name = "AuthenticationError"; + } +} diff --git a/packages/auth/src/authentication/header.ts b/packages/auth/src/authentication/header.ts new file mode 100644 index 0000000..5cc9795 --- /dev/null +++ b/packages/auth/src/authentication/header.ts @@ -0,0 +1,36 @@ +import type { + AuthenticationContext, + AuthenticationProvider, + AuthenticationResult, + HeaderAuthenticationOptions, + HeaderToken, +} from "@listee/types"; +import { extractAuthorizationToken } from "./shared.js"; + +export function createHeaderAuthentication( + options: HeaderAuthenticationOptions = {}, +): AuthenticationProvider { + const headerName = options.headerName ?? "authorization"; + const scheme = options.scheme ?? 
"Bearer"; + + async function authenticate( + context: AuthenticationContext, + ): Promise { + const tokenValue = extractAuthorizationToken(context, headerName, scheme); + + const token: HeaderToken = { + type: "header", + scheme, + value: tokenValue, + }; + + return { + user: { + id: tokenValue, + token, + }, + }; + } + + return { authenticate }; +} diff --git a/packages/auth/src/authentication/index.ts b/packages/auth/src/authentication/index.ts new file mode 100644 index 0000000..57ef1e1 --- /dev/null +++ b/packages/auth/src/authentication/index.ts @@ -0,0 +1,3 @@ +export { AuthenticationError } from "./errors.js"; +export { createHeaderAuthentication } from "./header.js"; +export { createSupabaseAuthentication } from "./supabase.js"; diff --git a/packages/auth/src/authentication/shared.ts b/packages/auth/src/authentication/shared.ts new file mode 100644 index 0000000..5fef991 --- /dev/null +++ b/packages/auth/src/authentication/shared.ts @@ -0,0 +1,45 @@ +import type { AuthenticationContext } from "@listee/types"; +import { AuthenticationError } from "./errors.js"; + +function isString(value: unknown): value is string { + return typeof value === "string"; +} + +function isNonEmptyString(value: unknown): value is string { + if (!isString(value)) { + return false; + } + + return value.trim().length > 0; +} + +export function assertNonEmptyString(value: unknown, message: string): string { + if (!isNonEmptyString(value)) { + throw new AuthenticationError(message); + } + + return value; +} + +export function extractAuthorizationToken( + context: AuthenticationContext, + headerName: string, + scheme: string, +): string { + const headerValue = context.request.headers.get(headerName); + if (headerValue === null) { + throw new AuthenticationError("Missing authorization header"); + } + + const expectedPrefix = `${scheme} `; + if (!headerValue.startsWith(expectedPrefix)) { + throw new AuthenticationError("Invalid authorization scheme"); + } + + const tokenValue = 
headerValue.slice(expectedPrefix.length).trim(); + if (!isNonEmptyString(tokenValue)) { + throw new AuthenticationError("Missing token value"); + } + + return tokenValue; +} diff --git a/packages/auth/src/authentication/supabase.test.ts b/packages/auth/src/authentication/supabase.test.ts new file mode 100644 index 0000000..93f807f --- /dev/null +++ b/packages/auth/src/authentication/supabase.test.ts @@ -0,0 +1,197 @@ +import { describe, expect, test } from "bun:test"; +import type { + AuthenticatedToken, + SupabaseAuthenticationOptions, + SupabaseToken, +} from "@listee/types"; +import { exportJWK, generateKeyPair, SignJWT } from "jose"; +import { AuthenticationError, createSupabaseAuthentication } from "./index.js"; + +describe("createSupabaseAuthentication", () => { + test("returns user when token is valid", async () => { + const helper = await createSupabaseTestHelper({ + audience: "authenticated", + requiredRole: "authenticated", + }); + try { + const token = await helper.signToken({ + subject: "user-123", + role: "authenticated", + audience: "authenticated", + }); + + const request = new Request("https://example.com/api", { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + + const result = await helper.provider.authenticate({ request }); + + assertSupabaseToken(result.user.token); + expect(result.user.id).toBe("user-123"); + expect(result.user.token.sub).toBe("user-123"); + expect(result.user.token.role).toBe("authenticated"); + } finally { + helper.restore(); + } + }); + + test("throws when role requirement is not met", async () => { + const helper = await createSupabaseTestHelper({ + audience: "authenticated", + requiredRole: "service_role", + }); + try { + const token = await helper.signToken({ + subject: "user-456", + role: "authenticated", + audience: "authenticated", + }); + + const request = new Request("https://example.com/api", { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + + await expect(helper.provider.authenticate({ 
request })).rejects.toThrow( + AuthenticationError, + ); + } finally { + helper.restore(); + } + }); + + test("throws when authorization header is missing", async () => { + const helper = await createSupabaseTestHelper({ + audience: "authenticated", + requiredRole: "authenticated", + }); + try { + const request = new Request("https://example.com/api"); + + await expect(helper.provider.authenticate({ request })).rejects.toThrow( + AuthenticationError, + ); + } finally { + helper.restore(); + } + }); +}); + +interface SupabaseTestHelperConfig { + readonly audience?: SupabaseAuthenticationOptions["audience"]; + readonly projectUrl?: SupabaseAuthenticationOptions["projectUrl"]; + readonly requiredRole?: SupabaseAuthenticationOptions["requiredRole"]; + readonly clockToleranceSeconds?: SupabaseAuthenticationOptions["clockToleranceSeconds"]; +} + +interface SupabaseTestHelper { + readonly provider: ReturnType; + signToken(config: { + readonly subject: string; + readonly role: string; + readonly audience: string; + }): Promise; + restore(): void; +} + +async function createSupabaseTestHelper( + config: SupabaseTestHelperConfig, +): Promise { + const projectUrl = config.projectUrl ?? 
"https://example.supabase.co"; + const { publicKey, privateKey } = await generateKeyPair("RS256"); + const exportedJwk = await exportJWK(publicKey); + const jwk = { + ...exportedJwk, + use: "sig", + alg: "RS256", + kid: "test-key", + }; + + const keysUrl = `${projectUrl}/auth/v1/.well-known/jwks.json`; + const jwksBody = JSON.stringify({ keys: [jwk] }); + + const originalFetch = globalThis.fetch; + const mockFetch = Object.assign( + async ( + input: Parameters[0], + _init?: Parameters[1], + ): Promise => { + const requestUrl = resolveRequestUrl(input); + if (requestUrl === keysUrl) { + return new Response(jwksBody, { + status: 200, + headers: { + "content-type": "application/json", + }, + }); + } + + return new Response("Not Found", { status: 404 }); + }, + { preconnect: originalFetch.preconnect }, + ) satisfies typeof fetch; + + globalThis.fetch = mockFetch; + + const baseOptions: SupabaseAuthenticationOptions = { + projectUrl, + audience: config.audience, + requiredRole: config.requiredRole, + clockToleranceSeconds: config.clockToleranceSeconds, + }; + + const provider = createSupabaseAuthentication(baseOptions); + + async function signToken(input: { + readonly subject: string; + readonly role: string; + readonly audience: string; + }): Promise { + const jwt = await new SignJWT({ role: input.role }) + .setProtectedHeader({ alg: "RS256", kid: jwk.kid }) + .setSubject(input.subject) + .setAudience(input.audience) + .setIssuer(`${projectUrl}/auth/v1`) + .setIssuedAt() + .setExpirationTime("5m") + .sign(privateKey); + + return jwt; + } + + function restore(): void { + globalThis.fetch = originalFetch; + } + + return { provider, signToken, restore }; +} + +function assertSupabaseToken( + token: AuthenticatedToken, +): asserts token is SupabaseToken { + if ( + typeof token === "object" && + token !== null && + "sub" in token && + "role" in token + ) { + return; + } + + throw new Error("Expected Supabase token in authentication result"); +} + +function 
resolveRequestUrl(input: RequestInfo | URL): string { + if (typeof input === "string") { + return input; + } + + if (input instanceof URL) { + return input.toString(); + } + + return input.url; +} diff --git a/packages/auth/src/authentication/supabase.ts b/packages/auth/src/authentication/supabase.ts new file mode 100644 index 0000000..d28c273 --- /dev/null +++ b/packages/auth/src/authentication/supabase.ts @@ -0,0 +1,109 @@ +import type { + AuthenticationContext, + AuthenticationProvider, + AuthenticationResult, + SupabaseAuthenticationOptions, + SupabaseToken, +} from "@listee/types"; +import { createRemoteJWKSet, type JWTVerifyOptions, jwtVerify } from "jose"; +import { AuthenticationError } from "./errors.js"; +import { assertNonEmptyString, extractAuthorizationToken } from "./shared.js"; + +function parseSupabaseProjectUrl(value: string): URL { + const trimmed = value.trim(); + if (trimmed.length === 0) { + throw new Error("Supabase project URL is required"); + } + + try { + return new URL(trimmed); + } catch { + throw new Error("Supabase project URL must be a valid URL"); + } +} + +function buildSupabaseIssuerUrl(projectUrl: URL): string { + const issuerUrl = new URL("/auth/v1", projectUrl); + return issuerUrl.toString(); +} + +function buildSupabaseJwksUrl( + projectUrl: URL, + jwksPath: string | undefined, +): URL { + const normalizedPath = jwksPath ?? "/auth/v1/.well-known/jwks.json"; + return new URL(normalizedPath, projectUrl); +} + +function normalizeAudience( + audience: string | readonly string[] | undefined, +): string | string[] | undefined { + if (audience === undefined) { + return undefined; + } + + if (typeof audience === "string") { + return audience; + } + + return [...audience]; +} + +export function createSupabaseAuthentication( + options: SupabaseAuthenticationOptions, +): AuthenticationProvider { + const headerName = options.headerName ?? "authorization"; + const scheme = options.scheme ?? 
"Bearer"; + const projectUrl = parseSupabaseProjectUrl(options.projectUrl); + const issuer = (options.issuer ?? buildSupabaseIssuerUrl(projectUrl)).trim(); + const jwksUrl = buildSupabaseJwksUrl(projectUrl, options.jwksPath); + const audience = normalizeAudience(options.audience); + const requiredRole = options.requiredRole; + const clockTolerance = options.clockToleranceSeconds; + + const remoteJwkSet = createRemoteJWKSet(jwksUrl); + + async function authenticate( + context: AuthenticationContext, + ): Promise { + const tokenValue = extractAuthorizationToken(context, headerName, scheme); + const verifyOptions: JWTVerifyOptions = {}; + + if (issuer.length > 0) { + verifyOptions.issuer = issuer; + } + + if (audience !== undefined) { + verifyOptions.audience = audience; + } + + if (clockTolerance !== undefined) { + verifyOptions.clockTolerance = clockTolerance; + } + + const { payload } = await jwtVerify( + tokenValue, + remoteJwkSet, + verifyOptions, + ); + const subject = assertNonEmptyString(payload.sub, "Missing subject claim"); + + if (requiredRole !== undefined) { + const role = assertNonEmptyString(payload.role, "Missing role claim"); + if (role !== requiredRole) { + throw new AuthenticationError("Role not allowed"); + } + } + + const token: SupabaseToken = { ...payload }; + + return { + user: { + id: subject, + token, + }, + }; + } + + return { authenticate }; +} diff --git a/packages/auth/src/index.ts b/packages/auth/src/index.ts new file mode 100644 index 0000000..c2f700d --- /dev/null +++ b/packages/auth/src/index.ts @@ -0,0 +1,11 @@ +export type { + AuthenticatedUser, + AuthenticationContext, + AuthenticationProvider, + AuthenticationResult, +} from "@listee/types"; +export { + AuthenticationError, + createHeaderAuthentication, + createSupabaseAuthentication, +} from "./authentication/index.js"; diff --git a/packages/auth/tsconfig.build.json b/packages/auth/tsconfig.build.json new file mode 100644 index 0000000..02c50e7 --- /dev/null +++ 
b/packages/auth/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "**/*.test.ts", "**/__tests__/**"], + "include": ["src/**/*.ts"] +} diff --git a/packages/auth/tsconfig.json b/packages/auth/tsconfig.json new file mode 100644 index 0000000..7c9ff31 --- /dev/null +++ b/packages/auth/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "outDir": "dist", + "verbatimModuleSyntax": true + }, + "include": ["src/**/*.ts"], + "exclude": ["dist"] +} diff --git a/packages/db/package.json b/packages/db/package.json new file mode 100644 index 0000000..6030b39 --- /dev/null +++ b/packages/db/package.json @@ -0,0 +1,20 @@ +{ + "name": "@listee/db", + "version": "0.0.0", + "type": "module", + "main": "./dist/index.js", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "sideEffects": false, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --project tsconfig.build.json", + "clean": "rm -rf dist" + }, + "dependencies": { + "drizzle-orm": "catalog:", + "postgres": "catalog:" + } +} diff --git a/packages/db/src/index.test.ts b/packages/db/src/index.test.ts new file mode 100644 index 0000000..8d011f7 --- /dev/null +++ b/packages/db/src/index.test.ts @@ -0,0 +1,214 @@ +import { beforeAll, beforeEach, describe, expect, mock, test } from "bun:test"; + +// Import only the type information so the actual module code is evaluated lazily inside beforeAll. +// This ensures mocked dependencies are in place before ./index runs and binds to postgres/drizzle. 
+type ModuleExports = typeof import("./index"); +type CreatePostgresConnection = ModuleExports["createPostgresConnection"]; +type CreateRlsClient = ModuleExports["createRlsClient"]; + +interface PostgresCall { + url: unknown; + options: unknown; + connection: unknown; +} + +interface SqlStatement { + kind: "sql"; + strings: Array; + values: Array; +} + +interface RawFragment { + kind: "raw"; + value: unknown; +} + +interface TransactionRecord { + queries: Array; +} + +function renderSql(statement: SqlStatement): string { + return statement.strings.join(""); +} + +const postgresCalls: Array = []; +let connectionCounter = 0; + +const transactionRecords: Array = []; +const rawValues: Array = []; +let connectionNamespace = 0; + +function sqlTag( + strings: TemplateStringsArray, + ...values: Array +): SqlStatement { + return { + kind: "sql", + strings: Array.from(strings), + values, + } satisfies SqlStatement; +} + +sqlTag.raw = (value: unknown): RawFragment => { + rawValues.push(value); + return { + kind: "raw", + value, + } satisfies RawFragment; +}; + +mock.module("postgres", () => ({ + default: (url: unknown, options?: unknown) => { + connectionCounter += 1; + const connection = { + id: connectionCounter, + url, + options, + }; + postgresCalls.push({ url, options, connection }); + return connection; + }, +})); + +mock.module("drizzle-orm", () => ({ + sql: sqlTag, +})); + +mock.module("drizzle-orm/postgres-js", () => ({ + drizzle: () => { + return { + transaction: async ( + callback: (tx: { + execute: (query: SqlStatement) => Promise; + }) => Promise, + ): Promise => { + const record: TransactionRecord = { + queries: [], + }; + + const tx: { execute: (query: SqlStatement) => Promise } = { + execute: async (query: SqlStatement) => { + record.queries.push(query); + }, + }; + + transactionRecords.push(record); + return await callback(tx); + }, + } satisfies { + transaction: ( + callback: (tx: { + execute: (query: SqlStatement) => Promise; + }) => Promise, + ) => 
Promise; + }; + }, +})); + +let createPostgresConnection: CreatePostgresConnection; +let createRlsClient: CreateRlsClient; + +beforeAll(async () => { + process.env.POSTGRES_URL = "postgres://initial"; + const module = await import("./index"); + createPostgresConnection = module.createPostgresConnection; + createRlsClient = module.createRlsClient; +}); + +beforeEach(() => { + postgresCalls.splice(0, postgresCalls.length); + transactionRecords.splice(0, transactionRecords.length); + rawValues.splice(0, rawValues.length); + connectionNamespace += 1; + process.env.POSTGRES_URL = `postgres://test-${connectionNamespace}`; +}); + +describe("createPostgresConnection", () => { + test("reuses the cached connection", () => { + const first = createPostgresConnection(); + const second = createPostgresConnection(); + + expect(postgresCalls.length).toBe(1); + expect(second).toBe(first); + }); + + test("creates a new connection when reuseConnection is false", () => { + const initial = createPostgresConnection(); + const next = createPostgresConnection({ reuseConnection: false }); + + expect(postgresCalls.length).toBe(2); + expect(next).not.toBe(initial); + }); + + test("creates distinct cached connections per configuration", () => { + const first = createPostgresConnection({ + connectionString: `postgres://one-${connectionNamespace}`, + }); + const second = createPostgresConnection({ + connectionString: `postgres://two-${connectionNamespace}`, + }); + + expect(postgresCalls.length).toBe(2); + expect(second).not.toBe(first); + }); + + test("throws when no connection string can be resolved", () => { + process.env.POSTGRES_URL = ""; + + expect(() => { + createPostgresConnection({ reuseConnection: false }); + }).toThrow("POSTGRES_URL is not set."); + }); + + test("honors an explicit connection string", () => { + const explicit = `postgres://override-${connectionNamespace}`; + createPostgresConnection({ connectionString: explicit }); + + expect(postgresCalls[0]?.url).toBe(explicit); + 
}); +}); + +describe("createRlsClient", () => { + test("wraps RLS setup and teardown around the transaction", async () => { + const token = { + sub: "user-123", + role: "role-with-hyphen", + extra: "value", + }; + + const client = createRlsClient(token); + const result = await client.rls(async () => "done"); + + expect(result).toBe("done"); + expect(transactionRecords.length).toBe(1); + + const [record] = transactionRecords; + expect(record.queries.length).toBe(2); + + const [setupQuery, teardownQuery] = record.queries; + expect(setupQuery.values[0]).toBe(JSON.stringify(token)); + expect(setupQuery.values[1]).toBe(token.sub); + expect(renderSql(setupQuery)).toContain("set_config('request.jwt.claims'"); + expect(renderSql(setupQuery)).toContain( + "set_config('request.jwt.claim.sub'", + ); + + expect(renderSql(teardownQuery)).toContain( + "set_config('request.jwt.claims', NULL", + ); + expect(renderSql(teardownQuery)).toContain("reset role"); + expect(rawValues.at(-1)).toBe("anon"); + }); + + test("preserves a valid role value", async () => { + const token = { + sub: "user-999", + role: "editor", + }; + + const client = createRlsClient(token); + await client.rls(async () => undefined); + + expect(rawValues.at(-1)).toBe("editor"); + }); +}); diff --git a/packages/db/src/index.ts b/packages/db/src/index.ts new file mode 100644 index 0000000..93af2b0 --- /dev/null +++ b/packages/db/src/index.ts @@ -0,0 +1,218 @@ +import { sql } from "drizzle-orm"; +import { drizzle, type PostgresJsDatabase } from "drizzle-orm/postgres-js"; +import postgres, { type Options, type PostgresType, type Sql } from "postgres"; + +type DefaultTypeMap = Record; + +const GLOBAL_CACHE_PROPERTY = "__listeePgConnections" as const; + +type GlobalWithPgCache = typeof globalThis & { + [GLOBAL_CACHE_PROPERTY]?: Map; +}; + +export type PostgresConnection = Sql; + +export interface CreateConnectionOptions { + connectionString?: string; + postgresOptions?: Options; + reuseConnection?: boolean; +} + 
+function resolveConnectionString(options?: CreateConnectionOptions): string { + if (options?.connectionString && options.connectionString.length > 0) { + return options.connectionString; + } + + const envValue = process.env.POSTGRES_URL; + if (envValue && envValue.length > 0) { + return envValue; + } + + throw new Error("POSTGRES_URL is not set."); +} + +function shouldReuseConnection(options?: CreateConnectionOptions): boolean { + if (options?.reuseConnection !== undefined) { + return options.reuseConnection; + } + + return true; +} + +function createNewConnection( + connectionString: string, + options?: CreateConnectionOptions, +): PostgresConnection { + const baseOptions: Options = { + prepare: false, + ...(options?.postgresOptions ?? {}), + }; + + return postgres(connectionString, baseOptions); +} + +const localConnectionCache = new Map(); + +function createCacheKey( + connectionString: string, + options?: CreateConnectionOptions, +): string { + if (options?.postgresOptions === undefined) { + return connectionString; + } + + try { + return `${connectionString}|${JSON.stringify(options.postgresOptions)}`; + } catch { + return `${connectionString}|${String(options.postgresOptions)}`; + } +} + +function getCachedConnection(key: string): PostgresConnection | undefined { + const cachedLocally = localConnectionCache.get(key); + if (cachedLocally !== undefined) { + return cachedLocally; + } + + if (typeof globalThis !== "undefined") { + const cache = (globalThis as GlobalWithPgCache)[GLOBAL_CACHE_PROPERTY]; + return cache?.get(key); + } + + return undefined; +} + +function storeConnectionInCache( + key: string, + connection: PostgresConnection, +): void { + localConnectionCache.set(key, connection); + + if (process.env.NODE_ENV === "production") { + return; + } + + if (typeof globalThis === "undefined") { + return; + } + + const globalObject = globalThis as GlobalWithPgCache; + + if (globalObject[GLOBAL_CACHE_PROPERTY] === undefined) { + 
globalObject[GLOBAL_CACHE_PROPERTY] = new Map(); + } + + globalObject[GLOBAL_CACHE_PROPERTY]?.set(key, connection); +} + +export function createPostgresConnection( + options?: CreateConnectionOptions, +): PostgresConnection { + const connectionString = resolveConnectionString(options); + const cacheKey = createCacheKey(connectionString, options); + + if (!shouldReuseConnection(options)) { + return createNewConnection(connectionString, options); + } + + const cached = getCachedConnection(cacheKey); + if (cached !== undefined) { + return cached; + } + + const connection = createNewConnection(connectionString, options); + storeConnectionInCache(cacheKey, connection); + + return connection; +} + +export type Database = PostgresJsDatabase>; + +let cachedDatabase: Database | null = null; + +export function getDb(): Database { + if (cachedDatabase !== null) { + return cachedDatabase; + } + + const connection = createPostgresConnection(); + const database = drizzle(connection); + cachedDatabase = database; + return database; +} + +function sanitizeRole(role: unknown): string { + if (typeof role === "string" && /^[A-Za-z0-9_]+$/.test(role)) { + return role; + } + + return "anon"; +} + +export type SupabaseToken = { + iss?: string; + sub?: string; + aud?: string | Array; + exp?: number; + nbf?: number; + iat?: number; + jti?: string; + role?: string; +} & Record; + +export type RlsTransaction = Parameters< + Parameters[0] +>[0]; + +export interface CreateRlsClientOptions { + database?: Database; +} + +export interface RlsClient { + rls(transaction: (tx: RlsTransaction) => Promise): Promise; +} + +export function createRlsClient( + token: SupabaseToken, + options?: CreateRlsClientOptions, +): RlsClient { + const database = options?.database ?? getDb(); + const sanitizedRole = sanitizeRole(token.role); + const serializedToken = JSON.stringify(token); + const subject = typeof token.sub === "string" ? 
token.sub : ""; + + async function rls( + transaction: (tx: RlsTransaction) => Promise, + ): Promise { + return database.transaction(async (tx) => { + try { + await tx.execute(sql` + select set_config('request.jwt.claims', ${serializedToken}, TRUE); + select set_config('request.jwt.claim.sub', ${subject}, TRUE); + set local role ${sql.raw(sanitizedRole)}; + `); + + return await transaction(tx); + } finally { + await tx.execute(sql` + select set_config('request.jwt.claims', NULL, TRUE); + select set_config('request.jwt.claim.sub', NULL, TRUE); + reset role; + `); + } + }); + } + + return { rls }; +} + +export function createDrizzle( + token: SupabaseToken, + options?: CreateRlsClientOptions, +): RlsClient { + return createRlsClient(token, options); +} + +export { sql } from "drizzle-orm"; + +export * from "./schema/index.js"; diff --git a/packages/db/src/schema/index.ts b/packages/db/src/schema/index.ts new file mode 100644 index 0000000..868c7a1 --- /dev/null +++ b/packages/db/src/schema/index.ts @@ -0,0 +1,157 @@ +import { sql } from "drizzle-orm"; +import type { AnyPgColumn } from "drizzle-orm/pg-core"; +import { + boolean, + pgPolicy, + pgTable, + text, + timestamp, + uuid, +} from "drizzle-orm/pg-core"; +import { authenticatedRole } from "drizzle-orm/supabase"; + +const timestamps = { + createdAt: timestamp("created_at", { withTimezone: true }) + .notNull() + .defaultNow(), + updatedAt: timestamp("updated_at", { withTimezone: true }) + .notNull() + .defaultNow(), +}; + +export const profiles = pgTable( + "profiles", + { + id: uuid("id").primaryKey(), + email: text("email").notNull().unique(), + name: text("name"), + defaultCategoryId: uuid("default_category_id"), + ...timestamps, + }, + (table) => { + const isOwner = sql`${table.id} = (select auth.uid())`; + + return [ + pgPolicy("Users can view their profile", { + for: "select", + to: authenticatedRole, + using: isOwner, + }), + pgPolicy("Users can insert their profile", { + for: "insert", + to: 
authenticatedRole, + withCheck: isOwner, + }), + pgPolicy("Users can update their profile", { + for: "update", + to: authenticatedRole, + using: isOwner, + withCheck: isOwner, + }), + ]; + }, +); + +export type Profile = typeof profiles.$inferSelect; + +export const categories = pgTable( + "categories", + { + id: uuid("id").primaryKey().defaultRandom(), + name: text("name").notNull(), + kind: text("kind").notNull(), + createdBy: uuid("created_by") + .notNull() + .references((): AnyPgColumn => profiles.id, { onDelete: "restrict" }), + updatedBy: uuid("updated_by") + .notNull() + .references((): AnyPgColumn => profiles.id, { onDelete: "restrict" }), + ...timestamps, + }, + (table) => { + const isOwner = sql`${table.createdBy} = (select auth.uid())`; + + return [ + pgPolicy("Users can view their categories", { + for: "select", + to: authenticatedRole, + using: isOwner, + }), + pgPolicy("Users can insert categories", { + for: "insert", + to: authenticatedRole, + withCheck: isOwner, + }), + pgPolicy("Users can update their categories", { + for: "update", + to: authenticatedRole, + using: isOwner, + withCheck: isOwner, + }), + pgPolicy("Users can delete their categories", { + for: "delete", + to: authenticatedRole, + using: isOwner, + }), + ]; + }, +); + +export type Category = typeof categories.$inferSelect; + +export const tasks = pgTable( + "tasks", + { + id: uuid("id").primaryKey().defaultRandom(), + name: text("name").notNull(), + description: text("description"), + isChecked: boolean("is_checked").notNull().default(false), + categoryId: uuid("category_id") + .notNull() + .references((): AnyPgColumn => categories.id, { onDelete: "cascade" }), + createdBy: uuid("created_by") + .notNull() + .references((): AnyPgColumn => profiles.id, { onDelete: "restrict" }), + updatedBy: uuid("updated_by") + .notNull() + .references((): AnyPgColumn => profiles.id, { onDelete: "restrict" }), + ...timestamps, + }, + (table) => { + const hasAccess = sql` + ${table.createdBy} = (select 
auth.uid()) + OR EXISTS ( + SELECT 1 + FROM ${categories} + WHERE ${categories.id} = ${table.categoryId} + AND ${categories.createdBy} = (select auth.uid()) + ) + `; + + return [ + pgPolicy("Users can view their tasks", { + for: "select", + to: authenticatedRole, + using: hasAccess, + }), + pgPolicy("Users can insert tasks in their categories", { + for: "insert", + to: authenticatedRole, + withCheck: hasAccess, + }), + pgPolicy("Users can update their tasks", { + for: "update", + to: authenticatedRole, + using: hasAccess, + withCheck: hasAccess, + }), + pgPolicy("Users can delete their tasks", { + for: "delete", + to: authenticatedRole, + using: hasAccess, + }), + ]; + }, +); + +export type Task = typeof tasks.$inferSelect; diff --git a/packages/db/tsconfig.build.json b/packages/db/tsconfig.build.json new file mode 100644 index 0000000..02c50e7 --- /dev/null +++ b/packages/db/tsconfig.build.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "**/*.test.ts", "**/__tests__/**"], + "include": ["src/**/*.ts"] +} diff --git a/packages/db/tsconfig.json b/packages/db/tsconfig.json new file mode 100644 index 0000000..72648e6 --- /dev/null +++ b/packages/db/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "outDir": "dist", + "rootDir": "src", + "verbatimModuleSyntax": true, + "types": ["@types/bun"] + }, + "include": ["src/**/*.ts", "src/**/*.d.ts"], + "exclude": ["dist"] +} diff --git a/packages/types/package.json b/packages/types/package.json new file mode 100644 index 0000000..14abaf7 --- /dev/null +++ b/packages/types/package.json @@ -0,0 +1,19 @@ +{ + "name": "@listee/types", + "version": "0.0.0", + "type": "module", + "main": "./dist/index.js", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "sideEffects": false, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --project tsconfig.json", + "clean": "rm -rf dist" + }, + 
"dependencies": { + "@listee/db": "workspace:^" + } +} diff --git a/packages/types/src/api.ts b/packages/types/src/api.ts new file mode 100644 index 0000000..709e0cd --- /dev/null +++ b/packages/types/src/api.ts @@ -0,0 +1,124 @@ +import type { AuthenticationProvider } from "./authentication"; +import type { Category, PaginatedResult, Task } from "./db"; + +export interface ListCategoriesParams { + readonly userId: string; + readonly limit?: number; + readonly cursor?: string | null; +} + +export type ListCategoriesResult = PaginatedResult; + +export interface FindCategoryParams { + readonly categoryId: string; + readonly userId?: string; +} + +export interface CategoryQueries { + listByUserId(params: ListCategoriesParams): Promise; + findById(params: FindCategoryParams): Promise; +} + +export interface ListTasksParams { + readonly categoryId: string; + readonly userId?: string; +} + +export interface FindTaskParams { + readonly taskId: string; + readonly userId?: string; +} + +export interface TaskQueries { + listByCategory(params: ListTasksParams): Promise; + findById(params: FindTaskParams): Promise; +} + +export interface DatabaseHealthStatus { + readonly ok: boolean; + readonly error?: string; +} + +export type DatabaseHealthChecker = () => Promise; + +export interface ListCategoriesRepositoryParams { + readonly userId: string; + readonly limit: number; + readonly cursor?: string | null; +} + +export interface FindCategoryRepositoryParams { + readonly categoryId: string; + readonly userId?: string; +} + +export interface CategoryRepository { + listByUserId( + params: ListCategoriesRepositoryParams, + ): Promise>; + findById(params: FindCategoryRepositoryParams): Promise; +} + +export interface CategoryService { + listByUserId( + params: ListCategoriesRepositoryParams, + ): Promise>; + findById(params: FindCategoryRepositoryParams): Promise; +} + +export interface CategoryServiceDependencies { + readonly repository: CategoryRepository; +} + +export interface 
ListTasksRepositoryParams { + readonly categoryId: string; + readonly userId?: string; +} + +export interface FindTaskRepositoryParams { + readonly taskId: string; + readonly userId?: string; +} + +export interface TaskRepository { + listByCategory(params: ListTasksRepositoryParams): Promise; + findById(params: FindTaskRepositoryParams): Promise; +} + +export interface TaskService { + listByCategory(params: ListTasksRepositoryParams): Promise; + findById(params: FindTaskRepositoryParams): Promise; +} + +export interface TaskServiceDependencies { + readonly repository: TaskRepository; +} + +export interface CategoryQueriesDependencies { + readonly service: CategoryService; +} + +export interface TaskQueriesDependencies { + readonly service: TaskService; +} + +export interface RegisterCategoryRoutesOptions { + readonly queries?: CategoryQueries; + readonly authentication?: AuthenticationProvider; +} + +export interface RegisterTaskRoutesOptions { + readonly queries?: TaskQueries; + readonly authentication?: AuthenticationProvider; +} + +export interface RegisterHealthRoutesOptions { + readonly databaseHealth?: DatabaseHealthChecker; +} + +export interface AppDependencies { + readonly databaseHealth?: DatabaseHealthChecker; + readonly categoryQueries?: CategoryQueries; + readonly taskQueries?: TaskQueries; + readonly authentication?: AuthenticationProvider; +} diff --git a/packages/types/src/authentication.ts b/packages/types/src/authentication.ts new file mode 100644 index 0000000..ba39a1a --- /dev/null +++ b/packages/types/src/authentication.ts @@ -0,0 +1,41 @@ +import type { SupabaseToken } from "./db"; + +export interface HeaderToken { + readonly type: "header"; + readonly scheme: string; + readonly value: string; +} + +export type AuthenticatedToken = SupabaseToken | HeaderToken; + +export interface AuthenticatedUser { + readonly id: string; + readonly token: AuthenticatedToken; +} + +export interface AuthenticationContext { + readonly request: Request; +} + 
+export interface AuthenticationResult { + readonly user: AuthenticatedUser; +} + +export interface AuthenticationProvider { + authenticate(context: AuthenticationContext): Promise; +} + +export interface HeaderAuthenticationOptions { + readonly headerName?: string; + readonly scheme?: string; +} + +export interface SupabaseAuthenticationOptions + extends HeaderAuthenticationOptions { + readonly projectUrl: string; + readonly audience?: string | readonly string[]; + readonly issuer?: string; + readonly requiredRole?: string; + readonly clockToleranceSeconds?: number; + readonly jwksPath?: string; +} diff --git a/packages/types/src/db.ts b/packages/types/src/db.ts new file mode 100644 index 0000000..e14b16d --- /dev/null +++ b/packages/types/src/db.ts @@ -0,0 +1,26 @@ +import type { + Category, + categories, + Profile, + profiles, + SupabaseToken, + Task, + tasks, +} from "@listee/db"; + +export type { Category, Profile, SupabaseToken, Task }; + +export type NewProfile = typeof profiles.$inferInsert; +export type NewCategory = typeof categories.$inferInsert; +export type NewTask = typeof tasks.$inferInsert; + +export interface CategoryWithTasks { + readonly category: Category; + readonly tasks: readonly Task[]; +} + +export interface PaginatedResult { + readonly items: readonly T[]; + readonly nextCursor: string | null; + readonly hasMore: boolean; +} diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts new file mode 100644 index 0000000..c218b4e --- /dev/null +++ b/packages/types/src/index.ts @@ -0,0 +1,3 @@ +export * from "./api.js"; +export * from "./authentication.js"; +export * from "./db.js"; diff --git a/packages/types/tsconfig.json b/packages/types/tsconfig.json new file mode 100644 index 0000000..7c9ff31 --- /dev/null +++ b/packages/types/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "outDir": "dist", + "verbatimModuleSyntax": true + }, + 
"include": ["src/**/*.ts"], + "exclude": ["dist"] +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..fb5fc93 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "strict": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "skipLibCheck": true, + "baseUrl": ".", + "paths": { + "@listee/auth": ["packages/auth/src/index.ts"], + "@listee/db": ["packages/db/src/index.ts"], + "@listee/api": ["packages/api/src/index.ts"], + "@listee/types": ["packages/types/src/index.ts"] + } + } +}