diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml new file mode 100644 index 000000000..e9c72670a --- /dev/null +++ b/.github/workflows/e2e-tests.yml @@ -0,0 +1,77 @@ +name: E2E Tests + +on: + push: + branches: [main] + pull_request: + +jobs: + e2e-tests: + name: Run E2E Tests + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 10.19.0 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "pnpm" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Start Docker services + run: | + cd packages/db-collection-e2e/docker + docker compose up -d + echo "Waiting for services to be healthy..." + timeout 60 bash -c 'until docker compose ps | grep -q "healthy"; do sleep 2; done' + + - name: Build packages + run: | + pnpm --filter @tanstack/db-ivm build + pnpm --filter @tanstack/db build + pnpm --filter @tanstack/electric-db-collection build + pnpm --filter @tanstack/query-db-collection build + + - name: Run Electric E2E tests + run: | + cd packages/electric-db-collection + pnpm test:e2e + env: + ELECTRIC_URL: http://localhost:3000 + POSTGRES_HOST: localhost + POSTGRES_PORT: 54321 + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + POSTGRES_DB: e2e_test + + - name: Run Query E2E tests + run: | + cd packages/query-db-collection + pnpm test:e2e + env: + ELECTRIC_URL: http://localhost:3000 + + - name: Stop Docker services + if: always() + run: | + cd packages/db-collection-e2e/docker + docker compose down -v + + - name: Upload test results + if: failure() + uses: actions/upload-artifact@v4 + with: + name: test-results + path: packages/db-collection-e2e/junit/ + retention-days: 7 diff --git a/docs/reference/functions/createCollection.md b/docs/reference/functions/createCollection.md index 6e039a885..753151d0d 100644 --- 
a/docs/reference/functions/createCollection.md +++ b/docs/reference/functions/createCollection.md @@ -39,7 +39,7 @@ The utilities record type #### options -[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<[`InferSchemaOutput`](../../type-aliases/InferSchemaOutput.md)\<`T`\>, `TKey`, `T`, [`UtilsRecord`](../../type-aliases/UtilsRecord.md)\> & `object` & [`NonSingleResult`](../../type-aliases/NonSingleResult.md) +[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<[`InferSchemaOutput`](../../type-aliases/InferSchemaOutput.md)\<`T`\>, `TKey`, `T`, `TUtils`\> & `object` & [`NonSingleResult`](../../type-aliases/NonSingleResult.md) Collection options with optional utilities @@ -148,7 +148,7 @@ The utilities record type #### options -[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<[`InferSchemaOutput`](../../type-aliases/InferSchemaOutput.md)\<`T`\>, `TKey`, `T`, [`UtilsRecord`](../../type-aliases/UtilsRecord.md)\> & `object` & [`SingleResult`](../../type-aliases/SingleResult.md) +[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<[`InferSchemaOutput`](../../type-aliases/InferSchemaOutput.md)\<`T`\>, `TKey`, `T`, `TUtils`\> & `object` & [`SingleResult`](../../type-aliases/SingleResult.md) Collection options with optional utilities @@ -257,7 +257,7 @@ The utilities record type #### options -[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<`T`, `TKey`, `never`, [`UtilsRecord`](../../type-aliases/UtilsRecord.md)\> & `object` & [`NonSingleResult`](../../type-aliases/NonSingleResult.md) +[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<`T`, `TKey`, `never`, `TUtils`\> & `object` & [`NonSingleResult`](../../type-aliases/NonSingleResult.md) Collection options with optional utilities @@ -366,7 +366,7 @@ The utilities record type #### options -[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<`T`, `TKey`, `never`, [`UtilsRecord`](../../type-aliases/UtilsRecord.md)\> & `object` & 
[`SingleResult`](../../type-aliases/SingleResult.md) +[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<`T`, `TKey`, `never`, `TUtils`\> & `object` & [`SingleResult`](../../type-aliases/SingleResult.md) Collection options with optional utilities diff --git a/docs/reference/functions/extractSimpleComparisons.md b/docs/reference/functions/extractSimpleComparisons.md index ffc7e9d31..282214367 100644 --- a/docs/reference/functions/extractSimpleComparisons.md +++ b/docs/reference/functions/extractSimpleComparisons.md @@ -9,13 +9,15 @@ title: extractSimpleComparisons function extractSimpleComparisons(expr): SimpleComparison[]; ``` -Defined in: [packages/db/src/query/expression-helpers.ts:323](https://github.com/TanStack/db/blob/main/packages/db/src/query/expression-helpers.ts#L323) +Defined in: [packages/db/src/query/expression-helpers.ts:327](https://github.com/TanStack/db/blob/main/packages/db/src/query/expression-helpers.ts#L327) Extracts all simple comparisons from a WHERE expression. This is useful for simple APIs that only support basic filters. -Note: This only works for simple AND-ed conditions. Throws an error if it encounters -unsupported operations like OR, NOT, or complex nested expressions. +Note: This only works for simple AND-ed conditions and NOT-wrapped comparisons. +Throws an error if it encounters unsupported operations like OR or complex nested expressions. + +NOT operators are flattened by prefixing the operator name (e.g., `not(eq(...))` becomes `not_eq`). 
## Parameters @@ -33,7 +35,7 @@ Array of simple comparisons ## Throws -Error if expression contains OR, NOT, or other unsupported operations +Error if expression contains OR or other unsupported operations ## Example @@ -41,6 +43,8 @@ Error if expression contains OR, NOT, or other unsupported operations const comparisons = extractSimpleComparisons(where) // Returns: [ // { field: ['category'], operator: 'eq', value: 'electronics' }, -// { field: ['price'], operator: 'lt', value: 100 } +// { field: ['price'], operator: 'lt', value: 100 }, +// { field: ['email'], operator: 'isNull' }, // No value for null checks +// { field: ['status'], operator: 'not_eq', value: 'archived' } // ] ``` diff --git a/docs/reference/functions/localStorageCollectionOptions.md b/docs/reference/functions/localStorageCollectionOptions.md index 29b60f87f..3f4974b49 100644 --- a/docs/reference/functions/localStorageCollectionOptions.md +++ b/docs/reference/functions/localStorageCollectionOptions.md @@ -8,7 +8,7 @@ title: localStorageCollectionOptions ## Call Signature ```ts -function localStorageCollectionOptions(config): CollectionConfig, TKey, T, UtilsRecord> & object; +function localStorageCollectionOptions(config): CollectionConfig, TKey, T, LocalStorageCollectionUtils> & object; ``` Defined in: [packages/db/src/local-storage.ts:279](https://github.com/TanStack/db/blob/main/packages/db/src/local-storage.ts#L279) @@ -52,7 +52,7 @@ Configuration options for the localStorage collection ### Returns -[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<[`InferSchemaOutput`](../../type-aliases/InferSchemaOutput.md)\<`T`\>, `TKey`, `T`, [`UtilsRecord`](../../type-aliases/UtilsRecord.md)\> & `object` +[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<[`InferSchemaOutput`](../../type-aliases/InferSchemaOutput.md)\<`T`\>, `TKey`, `T`, [`LocalStorageCollectionUtils`](../../interfaces/LocalStorageCollectionUtils.md)\> & `object` Collection options with utilities including 
clearStorage, getStorageSize, and acceptMutations @@ -123,10 +123,10 @@ await tx.commit() ## Call Signature ```ts -function localStorageCollectionOptions(config): CollectionConfig & object; +function localStorageCollectionOptions(config): CollectionConfig & object; ``` -Defined in: [packages/db/src/local-storage.ts:294](https://github.com/TanStack/db/blob/main/packages/db/src/local-storage.ts#L294) +Defined in: [packages/db/src/local-storage.ts:299](https://github.com/TanStack/db/blob/main/packages/db/src/local-storage.ts#L299) Creates localStorage collection options for use with a standard Collection @@ -167,7 +167,7 @@ Configuration options for the localStorage collection ### Returns -[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<`T`, `TKey`, `never`, [`UtilsRecord`](../../type-aliases/UtilsRecord.md)\> & `object` +[`CollectionConfig`](../../interfaces/CollectionConfig.md)\<`T`, `TKey`, `never`, [`LocalStorageCollectionUtils`](../../interfaces/LocalStorageCollectionUtils.md)\> & `object` Collection options with utilities including clearStorage, getStorageSize, and acceptMutations diff --git a/docs/reference/functions/parseLoadSubsetOptions.md b/docs/reference/functions/parseLoadSubsetOptions.md index 49e561fb5..880c34177 100644 --- a/docs/reference/functions/parseLoadSubsetOptions.md +++ b/docs/reference/functions/parseLoadSubsetOptions.md @@ -9,7 +9,7 @@ title: parseLoadSubsetOptions function parseLoadSubsetOptions(options): object; ``` -Defined in: [packages/db/src/query/expression-helpers.ts:420](https://github.com/TanStack/db/blob/main/packages/db/src/query/expression-helpers.ts#L420) +Defined in: [packages/db/src/query/expression-helpers.ts:499](https://github.com/TanStack/db/blob/main/packages/db/src/query/expression-helpers.ts#L499) Convenience function to get all LoadSubsetOptions in a pre-parsed format. Good starting point for simple use cases. 
diff --git a/docs/reference/interfaces/SimpleComparison.md b/docs/reference/interfaces/SimpleComparison.md index 08531edca..f8f71f670 100644 --- a/docs/reference/interfaces/SimpleComparison.md +++ b/docs/reference/interfaces/SimpleComparison.md @@ -31,10 +31,10 @@ Defined in: [packages/db/src/query/expression-helpers.ts:46](https://github.com/ *** -### value +### value? ```ts -value: any; +optional value: any; ``` Defined in: [packages/db/src/query/expression-helpers.ts:47](https://github.com/TanStack/db/blob/main/packages/db/src/query/expression-helpers.ts#L47) diff --git a/packages/db-collection-e2e/README.md b/packages/db-collection-e2e/README.md new file mode 100644 index 000000000..292ef31e4 --- /dev/null +++ b/packages/db-collection-e2e/README.md @@ -0,0 +1,489 @@ +# @tanstack/db-collection-e2e + +Shared end-to-end test suite for TanStack DB collections with query-driven sync and on-demand loading. + +## Overview + +This package provides a comprehensive e2e test suite that can be reused across different collection implementations (Electric, Query, etc.). 
It tests: + +- Predicate push-down and filtering +- Pagination, ordering, and window management +- Multi-collection joins with mixed syncModes +- Deduplication of concurrent loadSubset calls +- String collation configurations +- Mutations with on-demand mode +- Live updates (for sync-enabled collections) + +## Architecture + +### Package Structure + +``` +db-collection-e2e/ +├── docker/ # Docker Compose for test infrastructure +│ ├── docker-compose.yml # Postgres + Electric +│ └── postgres.conf # Optimized for fast tests +├── src/ +│ ├── types.ts # TypeScript interfaces +│ ├── fixtures/ # Test data and schemas +│ │ ├── seed-data.ts # Generate ~100 records per table +│ │ └── test-schema.ts # SQL schema definitions +│ ├── suites/ # Test suite modules +│ │ ├── predicates.suite.ts +│ │ ├── pagination.suite.ts +│ │ ├── joins.suite.ts +│ │ ├── deduplication.suite.ts +│ │ ├── collation.suite.ts +│ │ ├── mutations.suite.ts +│ │ ├── live-updates.suite.ts +│ │ └── regressions.suite.ts +│ └── utils/ # Helper functions +│ ├── helpers.ts # Common utilities +│ └── assertions.ts # Custom assertions +└── support/ # Vitest setup + ├── global-setup.ts # Health checks, DB init + └── test-context.ts # Vitest fixtures +``` + +## Getting Started + +### Prerequisites + +- Docker and Docker Compose +- Node.js 20+ +- pnpm 10+ + +### Installation + +From the repository root: + +```bash +pnpm install +``` + +### Running Tests Locally + +1. **Start Docker services:** + +```bash +cd packages/db-collection-e2e/docker +docker compose up -d +``` + +2. **Run tests:** + +```bash +cd packages/db-collection-e2e +pnpm test +``` + +3.
**Stop Docker services:** + +```bash +cd packages/db-collection-e2e/docker +docker compose down +``` + +## Test Data Schema + +The test suite uses three related tables: + +### Users Table (~100 records) + +```typescript +interface User { + id: string // UUID + name: string // For collation testing + email: string | null + age: number + isActive: boolean + createdAt: Date + metadata: object | null // JSON field + deletedAt: Date | null // Soft delete +} +``` + +### Posts Table (~100 records) + +```typescript +interface Post { + id: string + userId: string // FK to User + title: string + content: string | null + viewCount: number + publishedAt: Date | null + deletedAt: Date | null +} +``` + +### Comments Table (~100 records) + +```typescript +interface Comment { + id: string + postId: string // FK to Post + userId: string // FK to User + text: string + createdAt: Date + deletedAt: Date | null +} +``` + +### Data Distributions + +Seed data includes: + +- Mix of null/non-null values +- Various string cases (uppercase, lowercase, special chars) +- Date ranges (past, present, future) +- Numeric ranges (negative, zero, positive) +- Some soft-deleted records (~10%) + +## Integrating with Your Collection + +### 1. Create Setup File + +Create `e2e/setup.ts` in your collection package. 
See real examples: + +- Electric: `packages/electric-db-collection/e2e/setup.ts` +- Query: `packages/query-db-collection/e2e/setup.ts` + +Example structure: + +```typescript +import { createCollection } from "@tanstack/db" +import { yourCollectionOptions } from "../src" +import type { + E2ETestConfig, + User, + Post, + Comment, +} from "../../db-collection-e2e/src/types" + +export async function createYourE2EConfig(options: { + schema: string + usersTable: string + postsTable: string + commentsTable: string +}): Promise { + // Create collections for both syncModes (eager and on-demand) + const eagerUsers = createCollection( + yourCollectionOptions({ + id: `your-e2e-users-eager-${Date.now()}`, + syncMode: "eager", + getKey: (item: User) => item.id, + startSync: false, + }) + ) + + const onDemandUsers = createCollection( + yourCollectionOptions({ + id: `your-e2e-users-ondemand-${Date.now()}`, + syncMode: "on-demand", + getKey: (item: User) => item.id, + startSync: false, + }) + ) + + // ... create posts and comments collections similarly + + return { + collections: { + eager: { users: eagerUsers, posts: eagerPosts, comments: eagerComments }, + onDemand: { + users: onDemandUsers, + posts: onDemandPosts, + comments: onDemandComments, + }, + }, + setup: async () => { + // Optional setup hook + }, + teardown: async () => { + await Promise.all([ + eagerUsers.cleanup(), + eagerPosts.cleanup(), + eagerComments.cleanup(), + onDemandUsers.cleanup(), + onDemandPosts.cleanup(), + onDemandComments.cleanup(), + ]) + }, + } +} +``` + +### 2. 
Create E2E Test File + +Create `e2e/your-collection.e2e.test.ts`: + +```typescript +import { describe, it, expect } from "vitest" +import { createCollection } from "@tanstack/db" +import { yourCollectionOptions } from "../src" + +describe("Your Collection E2E", () => { + it("should create collection", async () => { + const collection = createCollection( + yourCollectionOptions({ + id: "test-collection", + getKey: (item: any) => item.id, + startSync: false, + }) + ) + + expect(collection).toBeDefined() + expect(collection._sync.loadSubset).toBeDefined() + + await collection.cleanup() + }) +}) +``` + +### 3. Update Vitest Config + +Update your `vite.config.ts` to include e2e tests: + +```typescript +const config = defineConfig({ + test: { + include: ["tests/**/*.test.ts", "e2e/**/*.e2e.test.ts"], + // Remove dir: './tests' if present + }, +}) +``` + +### 4. Run Tests + +```bash +cd packages/your-collection +pnpm test +``` + +The e2e tests will run alongside your regular tests. + +## Test Suites + +All test suites are implemented in `src/suites/*.suite.ts` files and exported as factory functions. 
+ +### Predicates Suite (`predicates.suite.ts`) + +Tests basic where clause functionality with ~20 test scenarios: + +**Example Test:** + +```typescript +it("should filter with eq() on number field", async () => { + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }).where(({ user }) => eq(user.age, 25)) + ) + await query.preload() + + const results = Array.from(query.state.values()) + assertAllItemsMatch(query, (u) => u.age === 25) +}) +``` + +**Covers:** + +- `eq()`, `gt()`, `gte()`, `lt()`, `lte()` with all data types +- `inArray()` with arrays +- `isNull()`, `not(isNull())` for null checks +- Complex boolean logic with `and()`, `or()`, `not()` +- Predicate pushdown verification + +### Pagination Suite (`pagination.suite.ts`) + +Tests ordering and pagination with ~15 test scenarios: + +**Example Test:** + +```typescript +it("should sort ascending by single field", async () => { + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }).orderBy(({ user }) => user.age, "asc") + ) + await query.preload() + + const results = Array.from(query.state.values()) + assertSorted(results, "age", "asc") +}) +``` + +**Covers:** + +- Basic `orderBy` (asc/desc) on various field types +- Multiple `orderBy` fields +- `limit` and `offset` for pagination +- Edge cases (limit=0, offset beyond dataset) +- Performance verification (only requested page loaded) + +### Joins Suite (`joins.suite.ts`) + +Tests multi-collection joins with ~12 test scenarios: + +**Example Test:** + +```typescript +it("should join Users and Posts", async () => { + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .select(({ user, post }) => ({ + id: post.id, + userName: user.name, + postTitle: post.title, + })) + ) + await query.preload() + + expect(query.size).toBeGreaterThan(0) +}) +``` + +**Covers:** + +- Two-collection joins 
(Users + Posts) +- Three-collection joins (Users + Posts + Comments) +- Mixed syncModes (eager + on-demand) +- Predicates on joined collections +- Left joins and ordering on joined results +- Pagination on joined results + +### Deduplication Suite + +Tests concurrent loadSubset calls: + +- Identical predicates called simultaneously +- Overlapping predicates (subset relationships) +- Queries during active loading +- Deduplication callback verification + +### Collation Suite + +Tests string collation: + +- Default collation behavior +- Custom `defaultStringCollation` +- Query-level collation override +- Case-sensitive vs case-insensitive + +### Mutations Suite + +Tests data mutations: + +- Insert, update, delete operations +- Soft delete pattern +- Concurrent mutations +- Reactive query updates + +### Live Updates Suite (Optional) + +Tests reactive updates (for sync-enabled collections): + +- Backend data changes +- Updates during loadSubset +- Multiple watchers +- Subscription lifecycle + +### Regression Suite + +Tests for known bugs: + +- Initial state sent multiple times (#7214245) +- Race conditions in multi-join +- Missing data in change tracking +- LoadSubset naming changes (#9874949) + +## Configuration + +### Environment Variables + +- `ELECTRIC_URL` - Electric server URL (default: `http://localhost:3000`) +- `POSTGRES_HOST` - Postgres host (default: `localhost`) +- `POSTGRES_PORT` - Postgres port (default: `54321`) +- `POSTGRES_USER` - Postgres user (default: `postgres`) +- `POSTGRES_PASSWORD` - Postgres password (default: `password`) +- `POSTGRES_DB` - Postgres database (default: `e2e_test`) + +### Docker Configuration + +The Docker Compose setup uses: + +- Postgres 16 Alpine with tmpfs for speed +- Electric canary image +- Health checks with 10s timeout +- Optimized postgres.conf for testing + +## Troubleshooting + +### Docker services not starting + +```bash +# Check service status +docker compose ps + +# View logs +docker compose logs + +# Restart 
services +docker compose restart +``` + +### Tests timing out + +- Increase `timeout` in `vite.config.ts` +- Check Docker resource limits +- Verify network connectivity + +### Database connection errors + +- Ensure Docker services are healthy +- Check environment variables +- Verify ports are not in use + +### Test isolation issues + +Tests use unique table names per test to prevent collisions: + +``` +"users_taskId_random" +``` + +If you see data from other tests, check that cleanup is working properly. + +## Performance + +Target execution time: **< 5 minutes** for entire suite + +Optimizations: + +- tmpfs for Postgres data directory +- Serial execution (`fileParallelism: false`) +- Minimal test data (~300 records total) +- Optimized Postgres configuration +- Health checks with fast intervals + +## Contributing + +When adding new test suites: + +1. Create new file in `src/suites/` +2. Export test factory function +3. Add to main exports in `src/index.ts` +4. Update README with test suite description +5. 
Ensure execution time stays < 5 minutes + +## License + +MIT + +## Related + +- [RFC #676](https://github.com/TanStack/db/discussions/676) - Query-driven sync RFC +- [PR #763](https://github.com/TanStack/db/pull/763) - Implementation PR +- [TanStack DB Documentation](https://tanstack.com/db) diff --git a/packages/db-collection-e2e/docker/docker-compose.yml b/packages/db-collection-e2e/docker/docker-compose.yml new file mode 100644 index 000000000..4f8ee4549 --- /dev/null +++ b/packages/db-collection-e2e/docker/docker-compose.yml @@ -0,0 +1,41 @@ +name: "tanstack-db-e2e" + +services: + postgres: + image: postgres:16-alpine + environment: + POSTGRES_DB: e2e_test + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + ports: + - "54321:5432" + volumes: + - ./postgres.conf:/etc/postgresql/postgresql.conf:ro + tmpfs: + - /var/lib/postgresql/data # Speed optimization: in-memory storage + - /tmp + command: + - postgres + - -c + - config_file=/etc/postgresql/postgresql.conf + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 2s + timeout: 10s + retries: 5 + + electric: + image: electricsql/electric:canary + environment: + DATABASE_URL: postgresql://postgres:password@postgres:5432/e2e_test?sslmode=disable + ELECTRIC_INSECURE: true + ports: + - "3000:3000" + depends_on: + postgres: + condition: service_healthy + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:3000/v1/health || exit 1"] + interval: 2s + timeout: 10s + retries: 5 diff --git a/packages/db-collection-e2e/docker/postgres.conf b/packages/db-collection-e2e/docker/postgres.conf new file mode 100644 index 000000000..7a5643d1b --- /dev/null +++ b/packages/db-collection-e2e/docker/postgres.conf @@ -0,0 +1,23 @@ +# PostgreSQL configuration for e2e tests +# Optimized for fast test execution + +# Connection settings +listen_addresses = '*' +max_connections = 100 +shared_buffers = 128MB + +# Write-Ahead Logging +wal_level = logical +max_wal_senders = 10 +max_replication_slots = 10 + +# 
Performance +fsync = off # Safe for testing, NOT for production +synchronous_commit = off # Safe for testing, NOT for production +full_page_writes = off # Safe for testing, NOT for production + +# Logging (minimal for tests) +log_statement = 'none' +log_duration = off +log_min_duration_statement = -1 + diff --git a/packages/db-collection-e2e/package.json b/packages/db-collection-e2e/package.json new file mode 100644 index 000000000..69b4eff42 --- /dev/null +++ b/packages/db-collection-e2e/package.json @@ -0,0 +1,27 @@ +{ + "name": "@tanstack/db-collection-e2e", + "version": "0.0.1", + "private": true, + "description": "End-to-end test suite for TanStack DB collections", + "type": "module", + "scripts": { + "test": "echo 'This package contains shared test suites. Run tests from electric-db-collection or query-db-collection packages.'", + "docker:up": "docker compose -f docker/docker-compose.yml up -d", + "docker:down": "docker compose -f docker/docker-compose.yml down --volumes", + "docker:logs": "docker compose -f docker/docker-compose.yml logs -f" + }, + "dependencies": { + "@tanstack/db": "workspace:*", + "@tanstack/electric-db-collection": "workspace:*", + "@tanstack/query-db-collection": "workspace:*", + "pg": "^8.13.1" + }, + "devDependencies": { + "@types/pg": "^8.11.10", + "@vitest/ui": "^3.2.4", + "typescript": "^5.9.2", + "vite": "^7.1.12", + "vitest": "^3.2.4" + }, + "packageManager": "pnpm@10.19.0" +} diff --git a/packages/db-collection-e2e/src/fixtures/seed-data.ts b/packages/db-collection-e2e/src/fixtures/seed-data.ts new file mode 100644 index 000000000..04b849270 --- /dev/null +++ b/packages/db-collection-e2e/src/fixtures/seed-data.ts @@ -0,0 +1,227 @@ +import type { Comment, Post, SeedDataResult, User } from "../types" + +// Cache UUIDs for deterministic behavior across test runs +const uuidCache = new Map() + +/** + * Generate deterministic valid UUIDs for testing + */ +function generateId(prefix: string, index: number): string { + const key = 
`${prefix}-${index}` + if (!uuidCache.has(key)) { + // Generate a real UUID but make it deterministic for the same prefix+index + const hex = index.toString(16).padStart(8, `0`) + // Create a valid UUID v4 format + uuidCache.set( + key, + `${hex.slice(0, 8)}-0000-4000-8000-${hex.padStart(12, `0`)}` + ) + } + return uuidCache.get(key)! +} + +/** + * Generate seed data with proper distributions for testing + * + * Data characteristics: + * - ~100 users with varied attributes + * - ~100 posts distributed across users + * - ~100 comments distributed across posts + * - Mix of null/non-null values + * - Various string cases (uppercase, lowercase, special chars) + * - Date ranges (past, present, future) + * - Numeric ranges (negative, zero, positive) + * - Some soft-deleted records + */ +export function generateSeedData(): SeedDataResult { + const users: Array = [] + const posts: Array = [] + const comments: Array = [] + + const now = new Date() + const oneDay = 24 * 60 * 60 * 1000 + const oneYear = 365 * oneDay + + // Generate 100 users with varied distributions + for (let i = 0; i < 100; i++) { + const id = generateId(`user`, i) + + // Name variations for collation testing + const names = [ + `Alice ${i}`, + `bob ${i}`, + `Charlie ${i}`, + `DIANA ${i}`, + `Eve ${i}`, + `Frank ${i}`, + `Grace ${i}`, + `henry ${i}`, + `Ivy ${i}`, + `Jack ${i}`, + `Kate ${i}`, + `liam ${i}`, + `Mia ${i}`, + `Noah ${i}`, + `Olivia ${i}`, + `PAUL ${i}`, + `Quinn ${i}`, + `Rose ${i}`, + `sam ${i}`, + `Tina ${i}`, + ] + const name = names[i % names.length] + + // Email: 70% have emails, 30% null + const email = i % 10 < 7 ? `user${i}@example.com` : null + + // Age distribution: 18-80, with some edge cases + const age = i === 0 ? 0 : i === 1 ? -5 : i === 2 ? 
150 : 18 + (i % 63) + + // IsActive: 80% true, 20% false + const isActive = i % 5 !== 0 + + // CreatedAt: distributed over past year + const createdAt = new Date(now.getTime() - Math.random() * oneYear) + + // Metadata: 40% have metadata, 60% null + const metadata = + i % 5 < 2 ? { score: i * 10, level: Math.floor(i / 10) } : null + + // DeletedAt: 10% soft deleted + const deletedAt = + i % 10 === 0 + ? new Date(now.getTime() - Math.random() * oneDay * 30) + : null + + users.push({ + id: id, + name: name!, + email, + age, + isActive, + createdAt, + metadata, + deletedAt, + }) + } + + // Generate 100 posts distributed across users + for (let i = 0; i < 100; i++) { + const id = generateId(`post`, i) + + // Distribute posts across users (some users have multiple posts) + const userId = users[i % users.length]!.id + + // Title variations + const titles = [ + `Introduction to ${i}`, + `Deep Dive: Topic ${i}`, + `Quick Guide ${i}`, + `ANNOUNCEMENT: ${i}`, + `tutorial ${i}`, + `Best Practices ${i}`, + ] + const title = titles[i % titles.length]! + + // Content: 70% have content, 30% null + const content = + i % 10 < 7 + ? `This is the content for post ${i}. Lorem ipsum dolor sit amet.` + : null + + // ViewCount: varied distribution + const viewCount = i === 0 ? 0 : i === 1 ? -10 : i * 42 + + // PublishedAt: 80% published, 20% null (drafts) + const publishedAt = + i % 5 !== 0 ? new Date(now.getTime() - Math.random() * oneYear) : null + + // DeletedAt: 5% soft deleted + const deletedAt = + i % 20 === 0 + ? 
new Date(now.getTime() - Math.random() * oneDay * 10) + : null + + posts.push({ + id, + userId, + title, + content, + viewCount, + publishedAt, + deletedAt, + }) + } + + // Generate 100 comments distributed across posts + for (let i = 0; i < 100; i++) { + const id = generateId(`comment`, i) + + // Distribute comments across posts (some posts have multiple comments) + const postId = posts[i % posts.length]!.id + const userId = users[(i * 3) % users.length]!.id + + // Text variations + const texts = [ + `Great post! Comment ${i}`, + `I disagree with comment ${i}`, + `question about ${i}`, + `AMAZING WORK ${i}`, + `thanks for sharing ${i}`, + `Very helpful comment ${i}`, + ] + const text = texts[i % texts.length]! + + // CreatedAt: distributed over past 6 months + const createdAt = new Date(now.getTime() - Math.random() * (oneYear / 2)) + + // DeletedAt: 8% soft deleted + const deletedAt = + i % 13 === 0 ? new Date(now.getTime() - Math.random() * oneDay * 5) : null + + comments.push({ + id, + postId, + userId, + text, + createdAt, + deletedAt, + }) + } + + return { + users, + posts, + comments, + userIds: users.map((u) => u.id), + postIds: posts.map((p) => p.id), + commentIds: comments.map((c) => c.id), + } +} + +/** + * Get expected counts for different predicate scenarios + */ +export function getExpectedCounts(seedData: SeedDataResult) { + return { + // Users + totalUsers: seedData.users.length, + activeUsers: seedData.users.filter((u) => u.isActive).length, + deletedUsers: seedData.users.filter((u) => u.deletedAt !== null).length, + usersWithEmail: seedData.users.filter((u) => u.email !== null).length, + usersWithoutEmail: seedData.users.filter((u) => u.email === null).length, + usersWithMetadata: seedData.users.filter((u) => u.metadata !== null).length, + + // Posts + totalPosts: seedData.posts.length, + publishedPosts: seedData.posts.filter((p) => p.publishedAt !== null).length, + draftPosts: seedData.posts.filter((p) => p.publishedAt === null).length, + 
deletedPosts: seedData.posts.filter((p) => p.deletedAt !== null).length, + postsWithContent: seedData.posts.filter((p) => p.content !== null).length, + + // Comments + totalComments: seedData.comments.length, + deletedComments: seedData.comments.filter((c) => c.deletedAt !== null) + .length, + } +} diff --git a/packages/db-collection-e2e/src/fixtures/test-schema.ts b/packages/db-collection-e2e/src/fixtures/test-schema.ts new file mode 100644 index 000000000..9b3bbe160 --- /dev/null +++ b/packages/db-collection-e2e/src/fixtures/test-schema.ts @@ -0,0 +1,106 @@ +/** + * SQL schema definitions for test tables + */ + +export const USERS_TABLE_SCHEMA = ` + id UUID PRIMARY KEY, + name TEXT NOT NULL, + email TEXT, + age INTEGER NOT NULL, + "isActive" BOOLEAN NOT NULL DEFAULT true, + "createdAt" TIMESTAMP NOT NULL DEFAULT NOW(), + metadata JSONB, + "deletedAt" TIMESTAMP +` + +export const POSTS_TABLE_SCHEMA = ` + id UUID PRIMARY KEY, + "userId" UUID NOT NULL, + title TEXT NOT NULL, + content TEXT, + "viewCount" INTEGER NOT NULL DEFAULT 0, + "publishedAt" TIMESTAMP, + "deletedAt" TIMESTAMP +` + +export const COMMENTS_TABLE_SCHEMA = ` + id UUID PRIMARY KEY, + "postId" UUID NOT NULL, + "userId" UUID NOT NULL, + text TEXT NOT NULL, + "createdAt" TIMESTAMP NOT NULL DEFAULT NOW(), + "deletedAt" TIMESTAMP +` + +/** + * Helper to create all test tables + */ +export async function createTestTables( + dbClient: { query: (sql: string) => Promise }, + tableNames: { + users: string + posts: string + comments: string + } +): Promise { + await dbClient.query(` + CREATE TABLE IF NOT EXISTS ${tableNames.users} (${USERS_TABLE_SCHEMA}); + `) + + await dbClient.query(` + CREATE TABLE IF NOT EXISTS ${tableNames.posts} (${POSTS_TABLE_SCHEMA}); + `) + + await dbClient.query(` + CREATE TABLE IF NOT EXISTS ${tableNames.comments} (${COMMENTS_TABLE_SCHEMA}); + `) +} + +/** + * Helper to drop all test tables + */ +export async function dropTestTables( + dbClient: { query: (sql: string) => Promise }, 
+ tableNames: { + users: string + posts: string + comments: string + } +): Promise { + await dbClient.query(`DROP TABLE IF EXISTS ${tableNames.comments}`) + await dbClient.query(`DROP TABLE IF EXISTS ${tableNames.posts}`) + await dbClient.query(`DROP TABLE IF EXISTS ${tableNames.users}`) +} + +/** + * Type-safe table column mappings + */ +export const USER_COLUMNS = { + id: `id`, + name: `name`, + email: `email`, + age: `age`, + isActive: `isActive`, + createdAt: `createdAt`, + metadata: `metadata`, + deletedAt: `deletedAt`, +} as const + +export const POST_COLUMNS = { + id: `id`, + userId: `userId`, + title: `title`, + content: `content`, + viewCount: `viewCount`, + publishedAt: `publishedAt`, + deletedAt: `deletedAt`, +} as const + +export const COMMENT_COLUMNS = { + id: `id`, + postId: `postId`, + userId: `userId`, + text: `text`, + createdAt: `createdAt`, + deletedAt: `deletedAt`, +} as const diff --git a/packages/db-collection-e2e/src/index.ts b/packages/db-collection-e2e/src/index.ts new file mode 100644 index 000000000..04509bb9e --- /dev/null +++ b/packages/db-collection-e2e/src/index.ts @@ -0,0 +1,27 @@ +/** + * @tanstack/db-collection-e2e + * + * Shared end-to-end test suite for TanStack DB collections + */ + +export * from "./types" +export * from "./fixtures/test-schema" +export * from "./fixtures/seed-data" +export * from "./utils/helpers" +export * from "./utils/assertions" + +// Export specific utilities for convenience +export { + waitFor, + waitForQueryData, + waitForCollectionSize, +} from "./utils/helpers" + +// Export test suite creators +export { createPredicatesTestSuite } from "./suites/predicates.suite" +export { createPaginationTestSuite } from "./suites/pagination.suite" +export { createJoinsTestSuite } from "./suites/joins.suite" +export { createDeduplicationTestSuite } from "./suites/deduplication.suite" +export { createCollationTestSuite } from "./suites/collation.suite" +export { createMutationsTestSuite } from 
"./suites/mutations.suite" +export { createLiveUpdatesTestSuite } from "./suites/live-updates.suite" diff --git a/packages/db-collection-e2e/src/suites/collation.suite.ts b/packages/db-collection-e2e/src/suites/collation.suite.ts new file mode 100644 index 000000000..360f49340 --- /dev/null +++ b/packages/db-collection-e2e/src/suites/collation.suite.ts @@ -0,0 +1,146 @@ +/** + * Collation Test Suite + * + * Tests string collation configuration and behavior + */ + +import { describe, expect, it } from "vitest" +import { createLiveQueryCollection, eq } from "@tanstack/db" +import { waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createCollationTestSuite( + getConfig: () => Promise +) { + describe(`Collation Suite`, () => { + describe(`Default Collation`, () => { + it(`should use default collation for string comparisons`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.name, `asc`) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + await query.cleanup() + }) + + it(`should handle case-sensitive comparisons by default`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Different case variations should be treated as different + const query = createLiveQueryCollection( + (q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.name, `alice 0`)) // lowercase + ) + + await query.preload() + + const results = Array.from(query.state.values()) + // Should NOT match because seed data has "Alice 0" (capitalized) and default is case-sensitive + expect(results.length).toBe(0) + + await query.cleanup() + }) + }) + + describe(`Custom 
Collection-Level Collation`, () => { + it(`should use custom defaultStringCollation at collection level`, async () => { + const config = await getConfig() + + // Test will use collection with custom collation if provided + const query = createLiveQueryCollection({ + query: (q) => q.from({ user: config.collections.onDemand.users }), + defaultStringCollation: { + stringSort: `lexical`, + }, + }) + + await query.preload() + + expect(query.compareOptions.stringSort).toBe(`lexical`) + + await query.cleanup() + }) + + it(`should support locale-based collation`, async () => { + const config = await getConfig() + + const query = createLiveQueryCollection({ + query: (q) => q.from({ user: config.collections.onDemand.users }), + defaultStringCollation: { + stringSort: `locale`, + locale: `de-DE`, + }, + }) + + await query.preload() + + expect(query.compareOptions.stringSort).toBe(`locale`) + // Type narrow: when stringSort is 'locale', locale property exists + if (query.compareOptions.stringSort === `locale`) { + expect(query.compareOptions.locale).toBe(`de-DE`) + } + + await query.cleanup() + }) + }) + + describe(`Query-Level Collation Override`, () => { + it(`should override collection collation at query level`, async () => { + const config = await getConfig() + + const query = createLiveQueryCollection({ + query: (q) => q.from({ user: config.collections.onDemand.users }), + defaultStringCollation: { + stringSort: `lexical`, + }, + }) + + await query.preload() + + expect(query.compareOptions.stringSort).toBe(`lexical`) + + await query.cleanup() + }) + }) + + describe(`Collation in OrderBy`, () => { + it(`should respect collation when sorting strings`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.name, `asc`) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + 
const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + // Sanity-check the sorted results (exact order depends on the active collation) + for (let i = 1; i < results.length; i++) { + // Only asserts each name is present; exact ordering is collation-dependent + expect(results[i - 1]!.name).toBeTruthy() + } + + await query.cleanup() + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/suites/deduplication.suite.ts b/packages/db-collection-e2e/src/suites/deduplication.suite.ts new file mode 100644 index 000000000..c8c6a0b04 --- /dev/null +++ b/packages/db-collection-e2e/src/suites/deduplication.suite.ts @@ -0,0 +1,350 @@ +/** + * Deduplication Test Suite + * + * Tests concurrent loadSubset calls and deduplication behavior + */ + +import { describe, expect, it } from "vitest" +import { createLiveQueryCollection, eq, gt, isNull, lt } from "@tanstack/db" +import { waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createDeduplicationTestSuite( + getConfig: () => Promise<E2ETestConfig> +) { + describe(`Deduplication Suite`, () => { + describe(`Identical Predicates`, () => { + it(`should deduplicate identical concurrent queries`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Create two identical queries + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.age, 25)) + ) + + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.age, 25)) + ) + + // Execute concurrently + await Promise.all([query1.preload(), query2.preload()]) + await Promise.all([ + waitForQueryData(query1, { minSize: 1 }), + waitForQueryData(query2, { minSize: 1 }), + ]) + + // Both should have same results + expect(query1.size).toBe(query2.size) + expect(query1.size).toBeGreaterThan(0) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + 
it(`should deduplicate multiple identical queries`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Create 5 identical queries + const queries = Array.from({ length: 5 }, () => + createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + ) + + // Execute all concurrently + await Promise.all(queries.map((q) => q.preload())) + + // All should have same results + const firstSize = queries[0]?.size + expect(firstSize).toBeDefined() + queries.forEach((q) => { + expect(q.size).toBe(firstSize!) + }) + + await Promise.all(queries.map((q) => q.cleanup())) + }) + }) + + describe(`Overlapping Predicates`, () => { + it(`should handle subset predicates correctly`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Query 1: age > 25 (superset) + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 25)) + ) + + // Query 2: age > 30 (subset of Query 1) + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + + // Execute concurrently + await Promise.all([query1.preload(), query2.preload()]) + + // Query 2 results should be subset of Query 1 + expect(query2.size).toBeLessThanOrEqual(query1.size) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + + it(`should handle non-overlapping predicates correctly`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Query 1: age > 50 + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 50)) + ) + + // Query 2: age < 30 + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => lt(user.age, 30)) + ) + + // 
Execute concurrently + await Promise.all([query1.preload(), query2.preload()]) + await Promise.all([ + waitForQueryData(query1, { minSize: 1 }), + waitForQueryData(query2, { minSize: 1 }), + ]) + + // Both should execute independently + expect(query1.size).toBeGreaterThan(0) + expect(query2.size).toBeGreaterThan(0) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + }) + + describe(`Queries During Loading`, () => { + it(`should handle queries arriving during active load`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Start first query + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + ) + + // Immediately start identical second query + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + ) + + // Both should complete successfully + await Promise.all([query1.preload(), query2.preload()]) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + + it(`should handle query arriving mid-flight with different predicate`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 25)) + ) + + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 50)) + ) + + await Promise.all([query1.preload(), query2.preload()]) + + // Both execute correctly + expect(query1.size).toBeGreaterThanOrEqual(query2.size) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + }) + + describe(`Deduplication with Limit/Offset`, () => { + it(`should handle queries with same limit/offset`, async () => { + const config = await getConfig() + const usersCollection = 
config.collections.onDemand.users + + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(0) + ) + + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(0) + ) + + await Promise.all([query1.preload(), query2.preload()]) + await waitForQueryData(query1, { minSize: 10 }) + await waitForQueryData(query2, { minSize: 10 }) + + expect(query1.size).toBe(query2.size) + expect(query1.size).toBe(10) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + + it(`should handle queries with different limits`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + ) + + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(20) + ) + + await Promise.all([query1.preload(), query2.preload()]) + await waitForQueryData(query1, { minSize: 10 }) + await waitForQueryData(query2, { minSize: 20 }) + + expect(query1.size).toBe(10) + expect(query2.size).toBe(20) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + + it(`should handle queries with different offsets`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(0) + ) + + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(10) + ) + + await Promise.all([query1.preload(), query2.preload()]) + await waitForQueryData(query1, { minSize: 
10 }) + await waitForQueryData(query2, { minSize: 10 }) + + expect(query1.size).toBe(10) + expect(query2.size).toBe(10) + + // Results should be different + const ids1 = Array.from(query1.state.values()).map((u) => u.id) + const ids2 = Array.from(query2.state.values()).map((u) => u.id) + expect(ids1).not.toEqual(ids2) + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + }) + + describe(`Race Conditions`, () => { + it(`should handle rapid concurrent query bursts`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Create 20 identical queries + const queries = Array.from({ length: 20 }, () => + createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + ) + ) + + // Execute all simultaneously + await Promise.all(queries.map((q) => q.preload())) + + // All should have same results + const firstSize = queries[0]?.size + expect(firstSize).toBeDefined() + queries.forEach((q) => { + expect(q.size).toBe(firstSize!) 
+ }) + + await Promise.all(queries.map((q) => q.cleanup())) + }) + + it(`should not corrupt data with concurrent queries`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Execute many concurrent queries with different predicates + const queries = [ + createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.age, 25)) + ), + createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 50)) + ), + createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + ), + createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => isNull(user.email)) + ), + ] + + await Promise.all(queries.map((q) => q.preload())) + await Promise.all( + queries.map((q) => waitForQueryData(q, { minSize: 1 })) + ) + + // Each query should have valid results + queries.forEach((q) => { + expect(q.size).toBeGreaterThan(0) + }) + + await Promise.all(queries.map((q) => q.cleanup())) + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/suites/joins.suite.ts b/packages/db-collection-e2e/src/suites/joins.suite.ts new file mode 100644 index 000000000..0771e333c --- /dev/null +++ b/packages/db-collection-e2e/src/suites/joins.suite.ts @@ -0,0 +1,376 @@ +/** + * Joins Test Suite + * + * Tests multi-collection joins with various syncMode combinations + */ + +import { describe, expect, it } from "vitest" +import { createLiveQueryCollection, eq, gt, isNull } from "@tanstack/db" +import { waitFor, waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createJoinsTestSuite(getConfig: () => Promise<E2ETestConfig>) { + describe(`Joins Suite`, () => { + describe(`Two-Collection Joins`, () => { + it(`should join Users and Posts`, async () => { + const config = await getConfig() + const usersCollection = 
config.collections.onDemand.users + const postsCollection = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .select(({ user, post }) => ({ + id: post!.id, + userName: user.name, + postTitle: post!.title, + })) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + expect(results[0]!).toHaveProperty(`userName`) + expect(results[0]!).toHaveProperty(`postTitle`) + + await query.cleanup() + }) + + it(`should join with predicates on both collections`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + const postsCollection = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .where(({ post }) => gt(post!.viewCount, 10)) + .select(({ user, post }) => ({ + id: post!.id, + userName: user.name, + postTitle: post!.title, + viewCount: post!.viewCount, + })) + ) + + await query.preload() + + const results = Array.from(query.state.values()) + // Verify predicates applied + results.forEach((r) => { + expect(r.viewCount).toBeGreaterThan(10) + }) + + await query.cleanup() + }) + + it( + `should join with one eager, one on-demand`, + { timeout: 60000 }, + async () => { + const config = await getConfig() + const usersEager = config.collections.eager.users + const postsOnDemand = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersEager }) + .join({ post: postsOnDemand }, ({ user, post }) => + eq(user.id, post.userId) + ) + .select(({ user, post }) => ({ + id: post!.id, + userName: 
user.name, + postTitle: post!.title, + })) + ) + + await query.preload() + // Joins with eager + on-demand collections may need more time to load data from multiple sources + // Use longer timeout for CI environments which can be slower + await waitForQueryData(query, { minSize: 1, timeout: 50000 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + await query.cleanup() + } + ) + + it(`should join with ordering across collections`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + const postsCollection = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .orderBy(({ post }) => post!.viewCount, `desc`) + .select(({ user, post }) => ({ + id: post!.id, + userName: user.name, + postTitle: post!.title, + viewCount: post!.viewCount, + })) + ) + + await query.preload() + + // For joins with ordering, we need to wait for sufficient data in BOTH collections + // Wait for the posts collection to load enough data (not just the query results) + await waitFor(() => postsCollection.size >= 100, { + timeout: 5000, + interval: 50, + message: `Posts collection did not fully load (got ${postsCollection.size}/100)`, + }) + + // Also wait for query to have data + await waitForQueryData(query, { minSize: 50, timeout: 5000 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + // All results MUST have viewCount field (verifies join completed successfully) + expect(results.every((r) => typeof r.viewCount === `number`)).toBe(true) + + // Verify sorting by viewCount (descending) + for (let i = 1; i < results.length; i++) { + const prevCount = results[i - 1]!.viewCount + const currCount = results[i]!.viewCount + expect(prevCount).toBeGreaterThanOrEqual(currCount) + } + + 
await query.cleanup() + }) + + it(`should join with pagination`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + const postsCollection = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .orderBy(({ post }) => post!.id, `asc`) + .limit(10) + .offset(5) + .select(({ user, post }) => ({ + id: post!.id, + userName: user.name, + postTitle: post!.title, + })) + ) + + await query.preload() + + expect(query.size).toBeLessThanOrEqual(10) + + await query.cleanup() + }) + }) + + describe(`Three-Collection Joins`, () => { + it(`should join Users + Posts + Comments`, async () => { + const config = await getConfig() + const { users, posts, comments } = config.collections.onDemand + + const query = createLiveQueryCollection((q) => + q + .from({ user: users }) + .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) + .join({ comment: comments }, ({ post, comment }) => + eq(post!.id, comment.postId) + ) + .select(({ user, post, comment }) => ({ + id: comment!.id, + userName: user.name, + postTitle: post!.title, + commentText: comment!.text, + })) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + expect(results[0]!).toHaveProperty(`userName`) + expect(results[0]!).toHaveProperty(`postTitle`) + expect(results[0]!).toHaveProperty(`commentText`) + + await query.cleanup() + }) + + it(`should handle predicates on all three collections`, async () => { + const config = await getConfig() + const { users, posts, comments } = config.collections.onDemand + + const query = createLiveQueryCollection((q) => + q + .from({ user: users }) + .where(({ user }) => eq(user.isActive, true)) + .join({ post: posts }, ({ user, post }) => 
eq(user.id, post.userId)) + .where(({ post }) => isNull(post!.deletedAt)) + .join({ comment: comments }, ({ post, comment }) => + eq(post!.id, comment.postId) + ) + .where(({ comment }) => isNull(comment!.deletedAt)) + .select(({ user, post, comment }) => ({ + id: comment!.id, + userName: user.name, + postTitle: post!.title, + commentText: comment!.text, + })) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + // All results should match all predicates and return some rows + expect(results.length).toBeGreaterThan(0) + + await query.cleanup() + }) + + it( + `should handle mixed syncModes in 3-way join`, + { timeout: 60000 }, + async () => { + const config = await getConfig() + const usersEager = config.collections.eager.users + const postsOnDemand = config.collections.onDemand.posts + const commentsOnDemand = config.collections.onDemand.comments + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersEager }) + .join({ post: postsOnDemand }, ({ user, post }) => + eq(user.id, post.userId) + ) + .join({ comment: commentsOnDemand }, ({ post, comment }) => + eq(post!.id, comment.postId) + ) + .select(({ user, post, comment }) => ({ + id: comment!.id, + userName: user.name, + postTitle: post!.title, + commentText: comment!.text, + })) + ) + + await query.preload() + // 3-way joins with mixed eager + on-demand collections need more time + // Use longer timeout for CI environments which can be slower + await waitForQueryData(query, { minSize: 1, timeout: 50000 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + await query.cleanup() + } + ) + }) + + describe(`Predicate Pushdown in Joins`, () => { + it(`should push predicates to correct collections`, async () => { + const config = await getConfig() + const { users, posts } = config.collections.onDemand + + const query = createLiveQueryCollection((q) => + q + .from({ user: 
users }) + .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) + .where(({ post }) => gt(post!.viewCount, 50)) + .select(({ user, post }) => ({ + id: post!.id, + userName: user.name, + postTitle: post!.title, + viewCount: post!.viewCount, + })) + ) + + await query.preload() + + const results = Array.from(query.state.values()) + // Verify predicate applied + results.forEach((r) => { + expect(r.viewCount).toBeGreaterThan(50) + }) + + await query.cleanup() + }) + + it(`should not over-fetch in joined collections`, async () => { + const config = await getConfig() + const { users, posts } = config.collections.onDemand + + const query = createLiveQueryCollection((q) => + q + .from({ user: users }) + .where(({ user }) => gt(user.age, 30)) + .join({ post: posts }, ({ user, post }) => eq(user.id, post.userId)) + .select(({ user, post }) => ({ + id: post!.id, + userName: user.name, + userAge: user.age, + postTitle: post!.title, + })) + ) + + await query.preload() + + const results = Array.from(query.state.values()) + // All users should be > 30 + results.forEach((r) => { + expect(r.userAge).toBeGreaterThan(30) + }) + + await query.cleanup() + }) + }) + + describe(`Left Joins`, () => { + it(`should handle left joins correctly`, async () => { + const config = await getConfig() + const { users, posts } = config.collections.onDemand + + const query = createLiveQueryCollection((q) => + q + .from({ user: users }) + .leftJoin({ post: posts }, ({ user, post }) => + eq(user.id, post.userId) + ) + .select(({ user, post }) => ({ + id: user.id, + userName: user.name, + postTitle: post!.title, // May be null for users without posts + })) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + await query.cleanup() + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/suites/live-updates.suite.ts 
b/packages/db-collection-e2e/src/suites/live-updates.suite.ts new file mode 100644 index 000000000..ed66fc4b5 --- /dev/null +++ b/packages/db-collection-e2e/src/suites/live-updates.suite.ts @@ -0,0 +1,380 @@ +/** + * Live Updates Test Suite (Optional, Electric-specific) + * + * Tests reactive updates for sync-enabled collections + */ + +import { randomUUID } from "node:crypto" +import { describe, expect, it } from "vitest" +import { createLiveQueryCollection, gt, lt } from "@tanstack/db" +import { waitFor, waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createLiveUpdatesTestSuite( + getConfig: () => Promise<E2ETestConfig> +) { + describe(`Live Updates Suite`, () => { + describe(`Reactive Updates`, () => { + it(`should receive updates when backend data changes`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Insert a new user with age > 30 + const newUserId = randomUUID() + await config.mutations.insertUser({ + id: newUserId, + name: `Live Update User`, + email: null, + age: 45, + isActive: true, + createdAt: new Date(), + metadata: null, + deletedAt: null, + }) + + // Query should reactively update + await waitFor(() => query.size > initialSize, { + timeout: 5000, + message: `Query did not receive live update`, + }) + + expect(query.size).toBe(initialSize + 1) + + // Clean up the inserted row + await config.mutations.deleteUser(newUserId) + + // Wait for deletion to propagate if using async replication (e.g., Electric) + if (config.hasReplicationLag) { + await waitFor( + () => 
!config.collections.onDemand.users.has(newUserId), + { + timeout: 5000, + message: `Deletion of user ${newUserId} did not propagate`, + } + ) + } + + await query.cleanup() + }) + + it(`should add new records that match query predicate`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Insert record with age=35 (matches predicate) + const newUserId = randomUUID() + await config.mutations.insertUser({ + id: newUserId, + name: `Matching User`, + email: null, + age: 35, + isActive: true, + createdAt: new Date(), + metadata: null, + deletedAt: null, + }) + + // Should appear in query results reactively + await waitFor(() => query.size > initialSize, { + timeout: 5000, + message: `New matching record did not appear`, + }) + + expect(query.size).toBe(initialSize + 1) + + // Clean up the inserted row + await config.mutations.deleteUser(newUserId) + + // Wait for deletion to propagate if using async replication (e.g., Electric) + if (config.hasReplicationLag) { + await waitFor( + () => !config.collections.onDemand.users.has(newUserId), + { + timeout: 5000, + message: `Deletion of user ${newUserId} did not propagate`, + } + ) + } + + await query.cleanup() + }) + + it(`should remove records that no longer match predicate`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) 
=> gt(user.age, 30)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Get a user currently in the query and update to not match + const queryUsers = Array.from(query.state.values()) + const userToUpdate = queryUsers[0] + + if (userToUpdate) { + // Update age to 25 (no longer matches age > 30) + await config.mutations.updateUser(userToUpdate.id, { age: 25 }) + + // Should be removed from query results + await waitFor(() => query.size < initialSize, { + timeout: 5000, + message: `Updated record was not removed from query`, + }) + + expect(query.size).toBe(initialSize - 1) + } + + await query.cleanup() + }) + }) + + describe(`Subscription Lifecycle`, () => { + it(`should receive updates when subscribed`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + let changeCount = 0 + const subscription = query.subscribeChanges(() => { + changeCount++ + }) + + // Insert a new user + const newUserId = randomUUID() + await config.mutations.insertUser({ + id: newUserId, + name: `Subscription Test User`, + email: `sub@example.com`, + age: 28, + isActive: true, + createdAt: new Date(), + metadata: null, + deletedAt: null, + }) + + // Should receive change notification + await waitFor(() => changeCount > 0, { + timeout: 5000, + message: `No change notifications received`, + }) + + expect(changeCount).toBeGreaterThan(0) + + subscription.unsubscribe() + + // Clean up the inserted row + await config.mutations.deleteUser(newUserId) + + // Wait for deletion to propagate if using async replication (e.g., Electric) + if (config.hasReplicationLag) { + await 
waitFor( + () => !config.collections.onDemand.users.has(newUserId), + { + timeout: 5000, + message: `Deletion of user ${newUserId} did not propagate`, + } + ) + } + + await query.cleanup() + }) + }) + + describe(`Multiple Watchers`, () => { + it(`should update all queries watching same data`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query1 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 25)) + ) + + const query2 = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => lt(user.age, 50)) + ) + + await Promise.all([query1.preload(), query2.preload()]) + await waitForQueryData(query1, { minSize: 1 }) + await waitForQueryData(query2, { minSize: 1 }) + + // Wait for collections to stabilize (especially important for async replication) + // This ensures any leftover data from previous tests has been cleaned up + if (config.hasReplicationLag) { + let stableSize1 = query1.size + let stableSize2 = query2.size + await waitFor( + () => { + const currentSize1 = query1.size + const currentSize2 = query2.size + const stable = + currentSize1 === stableSize1 && currentSize2 === stableSize2 + stableSize1 = currentSize1 + stableSize2 = currentSize2 + return stable + }, + { + timeout: 10000, + interval: 200, + message: `Collections did not stabilize`, + } + ) + } + + const initialSize1 = query1.size + const initialSize2 = query2.size + expect(initialSize1).toBeGreaterThan(0) + expect(initialSize2).toBeGreaterThan(0) + + // Insert user with age=35 (matches BOTH queries: 35 > 25 AND 35 < 50) + const newUserId = randomUUID() + await config.mutations.insertUser({ + id: newUserId, + name: `Multi Watch User`, + email: `multi@example.com`, + age: 35, + isActive: true, + createdAt: new Date(), + metadata: null, + 
deletedAt: null, + }) + + // Wait for both queries to have exactly the expected size + await waitFor( + () => + query1.size === initialSize1 + 1 && + query2.size === initialSize2 + 1, + { timeout: 10000, message: `Not all queries received the update` } + ) + + expect(query1.size).toBe(initialSize1 + 1) + expect(query2.size).toBe(initialSize2 + 1) + + // Clean up the inserted row + await config.mutations.deleteUser(newUserId) + + // Wait for deletion to propagate if using async replication (e.g., Electric) + // This prevents the next test from seeing this user when collections restart + // Check the eager collection since it continuously syncs all data + if (config.hasReplicationLag) { + await waitFor(() => !config.collections.eager.users.has(newUserId), { + timeout: 5000, + message: `Deletion of user ${newUserId} did not propagate`, + }) + } + + await Promise.all([query1.cleanup(), query2.cleanup()]) + }) + }) + + describe(`Update Existing Records`, () => { + it(`should update existing records in query results`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + // Get a user and update it + const users = Array.from(query.state.values()) + const userToUpdate = users[0] + + if (userToUpdate) { + const originalAge = userToUpdate.age + + // Update the user's age + await config.mutations.updateUser(userToUpdate.id, { + age: originalAge + 10, + }) + + // Wait for update to sync + await waitFor( + () => { + const updated = query.get(userToUpdate.id) + return updated?.age === originalAge + 10 + }, + { timeout: 5000, message: `Update did not sync to query` } + ) + + // Verify the update + const updatedUser = query.get(userToUpdate.id) + 
expect(updatedUser?.age).toBe(originalAge + 10) + } + + await query.cleanup() + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/suites/mutations.suite.ts b/packages/db-collection-e2e/src/suites/mutations.suite.ts new file mode 100644 index 000000000..955a31d7b --- /dev/null +++ b/packages/db-collection-e2e/src/suites/mutations.suite.ts @@ -0,0 +1,318 @@ +/** + * Mutations Test Suite + * + * Tests data mutations with on-demand syncMode + */ + +import { randomUUID } from "node:crypto" +import { describe, expect, it } from "vitest" +import { createLiveQueryCollection, eq, gt, isNull } from "@tanstack/db" +import { waitFor, waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createMutationsTestSuite( + getConfig: () => Promise +) { + describe(`Mutations Suite`, () => { + describe(`Insert Mutations`, () => { + it(`should insert new record via collection`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + // Load initial data + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Perform actual INSERT via backend + const newUserId = randomUUID() + await config.mutations.insertUser({ + id: newUserId, + name: `Test User`, + email: `test@example.com`, + age: 42, + isActive: true, + createdAt: new Date(), + metadata: null, + deletedAt: null, + }) + + // Wait for sync to propagate the new record (Electric only) + await waitFor(() => query.size > initialSize, { + timeout: 5000, + message: `New record did not appear in query`, + }) + + expect(query.size).toBe(initialSize + 1) + + // Clean up the inserted row + await config.mutations.deleteUser(newUserId) + + // Wait 
for deletion to propagate if using async replication (e.g., Electric) + // Check the eager collection since it continuously syncs all data + if (config.hasReplicationLag) { + await waitFor(() => !config.collections.eager.users.has(newUserId), { + timeout: 5000, + message: `Deletion of user ${newUserId} did not propagate`, + }) + } + + await query.cleanup() + }) + + it(`should handle insert appearing in matching queries`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Insert record that MATCHES predicate (age > 30) + const newUserId = randomUUID() + await config.mutations.insertUser({ + id: newUserId, + name: `Test Match User`, + email: `match@example.com`, + age: 50, + isActive: true, + createdAt: new Date(), + metadata: null, + deletedAt: null, + }) + + // Wait for sync to propagate (Electric only) + await waitFor(() => query.size > initialSize, { + timeout: 5000, + message: `Matching insert did not appear in query`, + }) + + expect(query.size).toBe(initialSize + 1) + + // Clean up the inserted row + await config.mutations.deleteUser(newUserId) + + // Wait for deletion to propagate if using async replication (e.g., Electric) + // Check the eager collection since it continuously syncs all data + if (config.hasReplicationLag) { + await waitFor(() => !config.collections.eager.users.has(newUserId), { + timeout: 5000, + message: `Deletion of user ${newUserId} did not propagate`, + }) + } + + await query.cleanup() + }) + }) + + describe(`Update Mutations`, () => { + it(`should handle update that makes record match predicate`, 
async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Find a user with age <= 30 in the full collection + const allUsers = Array.from(usersCollection.state.values()) + const userToUpdate = allUsers.find((u) => u.age <= 30) + + if (userToUpdate) { + // Update age to 35 (matches predicate) + await config.mutations.updateUser(userToUpdate.id, { age: 35 }) + + // Wait for Electric to sync and query to show updated record + await waitFor(() => query.size > initialSize, { + timeout: 5000, + message: `Updated record did not appear in query`, + }) + + expect(query.size).toBe(initialSize + 1) + } + + await query.cleanup() + }) + + it(`should handle update that makes record unmatch predicate`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 30)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Get a user currently in the query (age > 30) + const queryUsers = Array.from(query.state.values()) + const userToUpdate = queryUsers[0] + + if (userToUpdate) { + // Update age to 25 (no longer matches predicate) + await config.mutations.updateUser(userToUpdate.id, { age: 25 }) + + // Wait for Electric to sync and query to remove record + await waitFor(() => 
query.size < initialSize, { + timeout: 5000, + message: `Updated record was not removed from query`, + }) + + expect(query.size).toBe(initialSize - 1) + } + + await query.cleanup() + }) + }) + + describe(`Delete Mutations`, () => { + it(`should handle delete removing record from query`, async () => { + const config = await getConfig() + + if (!config.mutations) { + throw new Error(`Mutations not configured - test cannot run`) + } + + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + const initialSize = query.size + expect(initialSize).toBeGreaterThan(0) + + // Get a user and delete it + const users = Array.from(query.state.values()) + const userToDelete = users[0] + + if (userToDelete) { + await config.mutations.deleteUser(userToDelete.id) + + // Wait for delete to sync + await waitFor(() => query.size < initialSize, { + timeout: 5000, + message: `Delete did not sync to query`, + }) + + expect(query.size).toBe(initialSize - 1) + } + + await query.cleanup() + }) + }) + + describe(`Soft Delete Pattern`, () => { + it(`should filter out soft-deleted records`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => isNull(user.deletedAt)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + // All results should not be soft-deleted + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + results.forEach((u) => { + expect(u.deletedAt).toBeNull() + }) + + await query.cleanup() + }) + + it(`should include soft-deleted records when not filtered`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = 
createLiveQueryCollection((q) => + q.from({ user: usersCollection }) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + // Should include both deleted and non-deleted + const results = Array.from(query.state.values()) + const hasNotDeleted = results.some((u) => u.deletedAt === null) + + expect(hasNotDeleted).toBe(true) + + await query.cleanup() + }) + }) + + describe(`Mutation with Queries`, () => { + it(`should maintain query state during data changes`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + .orderBy(({ user }) => user.age, `asc`) + .limit(10) + ) + + await query.preload() + + // Test structure: Mutations should maintain pagination state + expect(query.size).toBeLessThanOrEqual(10) + + await query.cleanup() + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/suites/pagination.suite.ts b/packages/db-collection-e2e/src/suites/pagination.suite.ts new file mode 100644 index 000000000..c0b7badcb --- /dev/null +++ b/packages/db-collection-e2e/src/suites/pagination.suite.ts @@ -0,0 +1,371 @@ +/** + * Pagination Test Suite + * + * Tests ordering, limits, offsets, and window management + */ + +import { describe, expect, it } from "vitest" +import { createLiveQueryCollection, eq } from "@tanstack/db" +import { + assertAllItemsMatch, + assertCollectionSize, + assertSorted, +} from "../utils/assertions" +import { waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createPaginationTestSuite( + getConfig: () => Promise +) { + describe(`Pagination Suite`, () => { + describe(`OrderBy`, () => { + it(`should sort ascending by single field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = 
createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.age, `asc`) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertSorted(results, `age`, `asc`) + + await query.cleanup() + }) + + it(`should sort descending by single field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.age, `desc`) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertSorted(results, `age`, `desc`) + + await query.cleanup() + }) + + it(`should sort by string field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.name, `asc`) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + // Verify it's sorted (don't assert exact order due to collation differences) + // Just verify we got results and they're in some order + expect(results[0]!).toHaveProperty(`name`) + + await query.cleanup() + }) + + it(`should sort by date field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.createdAt, `desc`) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + 
expect(results.length).toBeGreaterThan(0) + assertSorted(results, `createdAt`, `desc`) + + await query.cleanup() + }) + + it(`should sort by multiple fields`, async () => { + const config = await getConfig() + const postsCollection = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ post: postsCollection }) + .orderBy(({ post }) => [post.userId, post.viewCount]) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + + // Verify multi-field sort (userId first, then viewCount within each userId) + for (let i = 1; i < results.length; i++) { + const prev = results[i - 1]! + const curr = results[i]! + + // If userId is same, viewCount should be ascending + if (prev.userId === curr.userId) { + expect(prev.viewCount).toBeLessThanOrEqual(curr.viewCount) + } + } + + await query.cleanup() + }) + }) + + describe(`Limit`, () => { + it(`should limit to specific number of records`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 10 }) + + assertCollectionSize(query, 10) + + await query.cleanup() + }) + + it(`should handle limit=0`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(0) + ) + + await query.preload() + + assertCollectionSize(query, 0) + + await query.cleanup() + }) + + it(`should handle limit larger than dataset`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const 
query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(1000) + ) + + await query.preload() + + // Should return all records (100 from seed data) + expect(query.size).toBeLessThanOrEqual(100) + + await query.cleanup() + }) + + it(`should combine limit with orderBy`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.age, `asc`) + .limit(5) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 5 }) + + assertCollectionSize(query, 5) + const results = Array.from(query.state.values()) + assertSorted(results, `age`, `asc`) + + await query.cleanup() + }) + }) + + describe(`Offset`, () => { + it(`should skip records with offset`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(100) // Need limit with offset + .offset(20) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 80 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBe(80) // 100 - 20 = 80 + + await query.cleanup() + }) + + it(`should combine offset with limit (pagination)`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(20) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 10 }) + + assertCollectionSize(query, 10) + + await query.cleanup() + }) + + it(`should handle offset beyond dataset`, async () => { + const config = await getConfig() + const usersCollection 
= config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(100) + .offset(200) + ) + + await query.preload() + + assertCollectionSize(query, 0) + + await query.cleanup() + }) + }) + + describe(`Complex Pagination Scenarios`, () => { + it(`should paginate with predicates`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + .orderBy(({ user }) => user.age, `asc`) + .limit(10) + .offset(5) + ) + + await query.preload() + + const results = Array.from(query.state.values()) + expect(results.length).toBeLessThanOrEqual(10) + assertAllItemsMatch(query, (u) => u.isActive === true) + assertSorted(results, `age`, `asc`) + + await query.cleanup() + }) + + it(`should handle pagination edge cases - last page with fewer records`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection( + (q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(95) // Last 5 records + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + expect(query.size).toBeLessThanOrEqual(10) + expect(query.size).toBeGreaterThan(0) + + await query.cleanup() + }) + + it(`should handle single record pages`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(1) + .offset(0) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + assertCollectionSize(query, 1) + + await query.cleanup() + }) + }) + + 
describe(`Performance Verification`, () => { + it(`should only load requested page (not entire dataset)`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .orderBy(({ user }) => user.id, `asc`) + .limit(10) + .offset(20) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 10 }) + + // Verify we got exactly 10 records + assertCollectionSize(query, 10) + + // In on-demand mode, the underlying collection should ideally only load + // the requested page, not all 100 records + // (This depends on Electric's predicate pushdown implementation) + + await query.cleanup() + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/suites/predicates.suite.ts b/packages/db-collection-e2e/src/suites/predicates.suite.ts new file mode 100644 index 000000000..6ece8331e --- /dev/null +++ b/packages/db-collection-e2e/src/suites/predicates.suite.ts @@ -0,0 +1,609 @@ +/** + * Predicates Test Suite + * + * Tests basic where clause functionality with all comparison operators + * across different data types. 
+ */ + +import { describe, expect, it } from "vitest" +import { + and, + createLiveQueryCollection, + eq, + gt, + gte, + inArray, + isNull, + lt, + lte, + not, + or, +} from "@tanstack/db" +import { assertAllItemsMatch, assertCollectionSize } from "../utils/assertions" +import { waitForQueryData } from "../utils/helpers" +import type { E2ETestConfig } from "../types" + +export function createPredicatesTestSuite( + getConfig: () => Promise +) { + describe(`Predicates Suite`, () => { + describe(`Equality Operators`, () => { + it(`should filter with eq() on string field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.name, `Alice 0`)) + ) + + await query.preload() + + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + expect(results.every((u) => u.name === `Alice 0`)).toBe(true) + + await query.cleanup() + }) + + it(`should filter with eq() on number field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.age, 25)) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.age === 25) + + await query.cleanup() + }) + + it(`should filter with eq() on boolean field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.isActive, true)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + 
assertAllItemsMatch(query, (u) => u.isActive === true) + + await query.cleanup() + }) + + it(`should filter with eq() on UUID field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const testUserId = `00000000-0000-4000-8000-000000000000` // User ID for index 0 + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.id, testUserId)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + assertCollectionSize(query, 1) + const result = Array.from(query.state.values())[0] + expect(result?.id).toBe(testUserId) + + await query.cleanup() + }) + + it(`should filter with isNull() for null values`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => isNull(user.email)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.email === null) + + await query.cleanup() + }) + }) + + describe(`Inequality Operators`, () => { + it(`should filter with not(eq()) on string field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => not(eq(user.name, `Alice 0`))) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.name !== `Alice 0`) + + await query.cleanup() + }) + + it(`should filter with not(isNull()) for non-null values`, async () => { + const config = await getConfig() + const 
usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => not(isNull(user.email))) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.email !== null) + + await query.cleanup() + }) + }) + + describe(`Comparison Operators`, () => { + it(`should filter with gt() on number field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 50)) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.age > 50) + + await query.cleanup() + }) + + it(`should filter with gte() on number field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gte(user.age, 50)) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.age >= 50) + + await query.cleanup() + }) + + it(`should filter with lt() on number field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => lt(user.age, 30)) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.age < 30) + + await query.cleanup() + }) + + it(`should filter with lte() on number field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => lte(user.age, 30)) + ) 
+ + await query.preload() + + assertAllItemsMatch(query, (u) => u.age <= 30) + + await query.cleanup() + }) + + it(`should filter with gt() on viewCount field`, async () => { + const config = await getConfig() + const postsCollection = config.collections.onDemand.posts + + const query = createLiveQueryCollection((q) => + q + .from({ post: postsCollection }) + .where(({ post }) => gt(post.viewCount, 100)) + ) + + await query.preload() + + assertAllItemsMatch(query, (p) => p.viewCount > 100) + + await query.cleanup() + }) + }) + + describe(`In Operator`, () => { + it(`should filter with inArray() on string array`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => + inArray(user.name, [`Alice 0`, `bob 1`, `Charlie 2`]) + ) + ) + + await query.preload() + + const validNames = new Set([`Alice 0`, `bob 1`, `Charlie 2`]) + assertAllItemsMatch(query, (u) => validNames.has(u.name)) + + await query.cleanup() + }) + + it(`should filter with inArray() on number array`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => inArray(user.age, [25, 30, 35])) + ) + + await query.preload() + + const validAges = new Set([25, 30, 35]) + assertAllItemsMatch(query, (u) => validAges.has(u.age)) + + await query.cleanup() + }) + + it(`should filter with inArray() on UUID array`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const userIds = [ + `00000000-0000-4000-8000-000000000000`, // User ID for index 0 + `00000001-0000-4000-8000-000000000001`, // User ID for index 1 + `00000002-0000-4000-8000-000000000002`, // User ID for index 2 + ] + + const query = createLiveQueryCollection((q) => + q 
+ .from({ user: usersCollection }) + .where(({ user }) => inArray(user.id, userIds)) + ) + + await query.preload() + + const validIds = new Set(userIds) + assertAllItemsMatch(query, (u) => validIds.has(u.id)) + + await query.cleanup() + }) + + it(`should handle empty inArray()`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => inArray(user.id, [])) + ) + + await query.preload() + + assertCollectionSize(query, 0) + + await query.cleanup() + }) + }) + + describe(`Null Operators`, () => { + it(`should filter with isNull() on nullable field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => isNull(user.email)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.email === null) + + await query.cleanup() + }) + + it(`should filter with not(isNull()) on nullable field`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => not(isNull(user.email))) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.email !== null) + + await query.cleanup() + }) + + it(`should filter with isNull() on deletedAt (soft delete pattern)`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = 
createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => isNull(user.deletedAt)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.deletedAt === null) + + await query.cleanup() + }) + }) + + describe(`Boolean Logic`, () => { + it(`should combine predicates with and()`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => and(gt(user.age, 25), eq(user.isActive, true))) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.age > 25 && u.isActive === true) + + await query.cleanup() + }) + + it(`should combine predicates with or()`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => or(eq(user.age, 25), eq(user.age, 30))) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.age === 25 || u.age === 30) + + await query.cleanup() + }) + + it(`should handle complex nested logic`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => + and( + or(eq(user.age, 25), eq(user.age, 30)), + eq(user.isActive, true) + ) + ) + ) + + await query.preload() + + assertAllItemsMatch( + query, + (u) => (u.age === 25 || u.age === 30) && u.isActive === true + ) + + await query.cleanup() + }) + + it(`should handle NOT operator`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = 
createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => not(eq(user.isActive, true))) + ) + + await query.preload() + + assertAllItemsMatch(query, (u) => u.isActive !== true) + + await query.cleanup() + }) + }) + + describe(`Predicate Pushdown Verification`, () => { + it(`should only load data matching predicate (no over-fetching)`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.age, 25)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + // Verify that the underlying collection didn't load ALL users + // In on-demand mode, it should only load age=25 users + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + expect(results.length).toBeLessThan(100) // Shouldn't load all 100 users + + await query.cleanup() + }) + + it(`should not load deleted records when filtering them out`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => isNull(user.deletedAt)) + ) + + await query.preload() + await waitForQueryData(query, { minSize: 1 }) + + const results = Array.from(query.state.values()) + expect(results.length).toBeGreaterThan(0) + assertAllItemsMatch(query, (u) => u.deletedAt === null) + + await query.cleanup() + }) + }) + + describe(`Multiple where() Calls`, () => { + it(`should AND multiple where() calls together`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => gt(user.age, 25)) + .where(({ user }) => eq(user.isActive, true)) + ) + + await 
query.preload() + + assertAllItemsMatch(query, (u) => u.age > 25 && u.isActive === true) + + await query.cleanup() + }) + }) + + describe(`Edge Cases`, () => { + it(`should handle query with no where clause on on-demand collection`, async () => { + // NOTE: Electric has a bug where empty subset requests don't load data + // We work around this by injecting "true = true" when there's no where clause + // This is always true so doesn't filter data, just tricks Electric into loading + + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + // Query with NO where clause - loads all data + const query = createLiveQueryCollection( + (q) => q.from({ user: usersCollection }) + // No where, no limit, no orderBy + ) + + await query.preload() + await waitForQueryData(query, { minSize: 50 }) + + // Should load significant data (true = true workaround for Electric) + expect(query.size).toBeGreaterThan(0) + expect(query.size).toBe(usersCollection.size) // Query shows all collection data + + await query.cleanup() + }) + + it(`should handle predicate matching no records`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.age, 999)) + ) + + await query.preload() + + assertCollectionSize(query, 0) + + await query.cleanup() + }) + + it(`should handle complex AND with no matches`, async () => { + const config = await getConfig() + const usersCollection = config.collections.onDemand.users + + const query = createLiveQueryCollection((q) => + q.from({ user: usersCollection }).where(({ user }) => + and( + eq(user.age, 25), + eq(user.age, 30) // Impossible: age can't be both 25 and 30 + ) + ) + ) + + await query.preload() + + assertCollectionSize(query, 0) + + await query.cleanup() + }) + }) + }) +} diff --git a/packages/db-collection-e2e/src/types.ts 
b/packages/db-collection-e2e/src/types.ts new file mode 100644 index 000000000..86ad7d6d5 --- /dev/null +++ b/packages/db-collection-e2e/src/types.ts @@ -0,0 +1,95 @@ +import type { Collection } from "@tanstack/db" + +/** + * Test data schema types + */ +export interface User { + id: string // UUID + name: string // For collation testing + email: string | null + age: number + isActive: boolean + createdAt: Date + metadata: Record | null // JSON field + deletedAt: Date | null // Soft delete +} + +export interface Post { + id: string + userId: string // FK to User + title: string + content: string | null + viewCount: number + publishedAt: Date | null + deletedAt: Date | null +} + +export interface Comment { + id: string + postId: string // FK to Post + userId: string // FK to User + text: string + createdAt: Date + deletedAt: Date | null +} + +/** + * Seed data result + */ +export interface SeedDataResult { + users: Array + posts: Array + comments: Array + userIds: Array + postIds: Array + commentIds: Array +} + +/** + * Test configuration for e2e tests + */ +export interface E2ETestConfig { + collections: { + eager: { + users: Collection + posts: Collection + comments: Collection + } + onDemand: { + users: Collection + posts: Collection + comments: Collection + } + } + + // Mutation helpers using collection APIs (works for both Electric and Query) + // Note: Requires collections to have onInsert/onUpdate/onDelete handlers configured + mutations?: { + insertUser: (user: User) => Promise + updateUser: (id: string, updates: Partial) => Promise + deleteUser: (id: string) => Promise + insertPost: (post: Post) => Promise + } + + // Indicates if the backend has replication lag (e.g., Electric sync) + // When true, tests will wait for mutations to propagate before proceeding + hasReplicationLag?: boolean + + // Lifecycle hooks + setup: () => Promise + teardown: () => Promise + beforeEach?: () => Promise + afterEach?: () => Promise +} + +/** + * Database client interface (pg 
Client) + */ +export interface DbClient { + connect: () => Promise + end: () => Promise + query: ( + sql: string, + values?: Array + ) => Promise<{ rows: Array }> +} diff --git a/packages/db-collection-e2e/src/utils/assertions.ts b/packages/db-collection-e2e/src/utils/assertions.ts new file mode 100644 index 000000000..daf6f3303 --- /dev/null +++ b/packages/db-collection-e2e/src/utils/assertions.ts @@ -0,0 +1,182 @@ +import { expect } from "vitest" +import { getLoadedIds } from "./helpers" +import type { Collection } from "@tanstack/db" + +/** + * Assert that a collection has loaded exactly the expected items (no more, no less) + */ +export function assertLoadedExactly( + collection: Collection, + expectedIds: Array, + message?: string +) { + const loadedIds = getLoadedIds(collection) + const loadedSet = new Set(loadedIds) + const expectedSet = new Set(expectedIds) + + // Check for extra items + const extraIds = loadedIds.filter((id) => !expectedSet.has(id)) + if (extraIds.length > 0) { + throw new Error( + message ?? + `Collection has extra items: ${extraIds.join(`, `)} (expected only: ${expectedIds.join(`, `)})` + ) + } + + // Check for missing items + const missingIds = expectedIds.filter((id) => !loadedSet.has(id)) + if (missingIds.length > 0) { + throw new Error( + message ?? + `Collection is missing items: ${missingIds.join(`, `)} (loaded: ${loadedIds.join(`, `)})` + ) + } +} + +/** + * Assert that a collection has loaded at least the expected items (may have more) + */ +export function assertLoadedAtLeast( + collection: Collection, + expectedIds: Array, + message?: string +) { + const loadedIds = getLoadedIds(collection) + const loadedSet = new Set(loadedIds) + + const missingIds = expectedIds.filter((id) => !loadedSet.has(id)) + if (missingIds.length > 0) { + throw new Error( + message ?? 
+ `Collection is missing items: ${missingIds.join(`, `)} (loaded: ${loadedIds.join(`, `)})` + ) + } +} + +/** + * Assert that a collection has NOT loaded any of the specified items + */ +export function assertNotLoaded( + collection: Collection, + forbiddenIds: Array, + message?: string +) { + const loadedIds = getLoadedIds(collection) + const loadedSet = new Set(loadedIds) + + const foundIds = forbiddenIds.filter((id) => loadedSet.has(id)) + if (foundIds.length > 0) { + throw new Error( + message ?? `Collection should not have loaded: ${foundIds.join(`, `)}` + ) + } +} + +/** + * Assert that a collection's size matches expected + */ +export function assertCollectionSize( + collection: Collection, + expectedSize: number, + message?: string +) { + expect(collection.size, message).toBe(expectedSize) +} + +/** + * Assert that all items in a collection match a predicate + */ +export function assertAllItemsMatch( + collection: Collection, + predicate: (item: T) => boolean, + message?: string +) { + const items = Array.from(collection.state.values()) + const failingItems = items.filter((item) => !predicate(item)) + + if (failingItems.length > 0) { + throw new Error( + message ?? + `${failingItems.length} item(s) did not match predicate: ${JSON.stringify(failingItems[0])}` + ) + } +} + +/** + * Assert that items are sorted correctly + */ +export function assertSorted( + items: Array, + field: K, + direction: `asc` | `desc` = `asc`, + message?: string +) { + for (let i = 1; i < items.length; i++) { + const prev = items[i - 1]![field] + const curr = items[i]![field] + + if (direction === `asc`) { + if (prev > curr) { + throw new Error( + message ?? + `Items not sorted ascending by ${String(field)}: ${prev} > ${curr} at index ${i}` + ) + } + } else { + if (prev < curr) { + throw new Error( + message ?? 
+ `Items not sorted descending by ${String(field)}: ${prev} < ${curr} at index ${i}` + ) + } + } + } +} + +/** + * Assert that predicate pushdown occurred (no over-fetching) + * This checks that the collection didn't load more data than necessary + */ +export function assertNoPushdownViolation( + collection: Collection, + expectedMaxIds: Array, + message?: string +) { + const loadedIds = getLoadedIds(collection) + const expectedSet = new Set(expectedMaxIds) + + // Check if any loaded IDs are not in expected set + const extraIds = loadedIds.filter((id) => !expectedSet.has(id)) + + if (extraIds.length > 0) { + throw new Error( + message ?? + `Predicate pushdown violation: Collection loaded ${extraIds.length} extra item(s) that don't match the predicate` + ) + } +} + +/** + * Assert that deduplication occurred + */ +export function assertDeduplicationOccurred( + actualLoads: number, + deduplicatedLoads: number, + expectedActualLoads: number, + message?: string +) { + expect(actualLoads, message ?? `Actual loads`).toBe(expectedActualLoads) + expect(deduplicatedLoads, message ?? `Deduplicated loads`).toBeGreaterThan(0) +} + +/** + * Assert that NO deduplication occurred (all requests were unique) + */ +export function assertNoDeduplication( + actualLoads: number, + deduplicatedLoads: number, + message?: string +) { + expect(deduplicatedLoads, message ?? `Deduplicated loads`).toBe(0) + expect(actualLoads, message ?? 
`Actual loads`).toBeGreaterThan(0) +} diff --git a/packages/db-collection-e2e/src/utils/helpers.ts b/packages/db-collection-e2e/src/utils/helpers.ts new file mode 100644 index 000000000..cd5a3676f --- /dev/null +++ b/packages/db-collection-e2e/src/utils/helpers.ts @@ -0,0 +1,206 @@ +import type { Collection } from "@tanstack/db" + +/** + * Wait for a condition to be true with timeout + */ +export async function waitFor( + condition: () => boolean | Promise, + options: { + timeout?: number + interval?: number + message?: string + } = {} +): Promise { + const { + timeout = 5000, + interval = 50, + message = `Condition not met`, + } = options + + const start = Date.now() + while (Date.now() - start < timeout) { + if (await condition()) { + return + } + await sleep(interval) + } + + throw new Error(`${message} (timeout after ${timeout}ms)`) +} + +/** + * Sleep for specified milliseconds + */ +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)) +} + +/** + * Get all loaded item IDs from a collection + */ +export function getLoadedIds( + collection: Collection +): Array { + return Array.from(collection.state.values()).map((item) => item.id) +} + +/** + * Get count of loaded items in a collection + */ +export function getLoadedCount( + collection: Collection +): number { + return collection.size +} + +/** + * Check if a collection has loaded specific IDs + */ +export function hasLoadedIds( + collection: Collection, + ids: Array +): boolean { + const loadedIds = getLoadedIds(collection) + return ids.every((id) => loadedIds.includes(id)) +} + +/** + * Check if a collection has ONLY loaded specific IDs (no extras) + */ +export function hasOnlyLoadedIds( + collection: Collection, + ids: Array +): boolean { + const loadedIds = getLoadedIds(collection) + const idsSet = new Set(ids) + + // Check that all loaded IDs are in expected IDs + if (!loadedIds.every((id) => idsSet.has(id))) { + return false + } + + // Check that all 
expected IDs are loaded + if (!ids.every((id) => loadedIds.includes(id))) { + return false + } + + return true +} + +/** + * Wait for a collection to reach a specific size + */ +export async function waitForCollectionSize( + collection: Collection, + expectedSize: number, + timeout = 5000 +): Promise { + await waitFor(() => collection.size === expectedSize, { + timeout, + message: `Collection size did not reach ${expectedSize} (current: ${collection.size})`, + }) +} + +/** + * Wait for collection to finish loading (status === 'ready') + */ +export async function waitForCollectionReady( + collection: Collection, + timeout = 5000 +): Promise { + await waitFor(() => collection.status === `ready`, { + timeout, + message: `Collection did not reach 'ready' status (current: ${collection.status})`, + }) +} + +/** + * Wait for a query to have data after preload. + * This is necessary for on-demand collections where Electric streams + * snapshot data asynchronously after loadSubset is triggered. + */ +export async function waitForQueryData( + query: Collection, + options: { + minSize?: number + timeout?: number + } = {} +): Promise { + const { minSize = 1, timeout = 2000 } = options + + await waitFor(() => query.size >= minSize, { + timeout, + interval: 10, + message: `Query did not load data (expected >= ${minSize}, got ${query.size})`, + }) +} + +/** + * Create a deduplication counter for testing + */ +export function createDeduplicationCounter() { + let actualLoads = 0 + let deduplicatedLoads = 0 + + return { + onLoad: () => { + actualLoads++ + }, + onDeduplicate: () => { + deduplicatedLoads++ + }, + getActualLoads: () => actualLoads, + getDeduplicatedLoads: () => deduplicatedLoads, + getTotalAttempts: () => actualLoads + deduplicatedLoads, + reset: () => { + actualLoads = 0 + deduplicatedLoads = 0 + }, + } +} + +/** + * Sort array of objects by a field + */ +export function sortBy( + array: Array, + field: K, + direction: `asc` | `desc` = `asc` +): Array { + return 
[...array].sort((a, b) => { + const aVal = a[field] + const bVal = b[field] + + if (aVal < bVal) return direction === `asc` ? -1 : 1 + if (aVal > bVal) return direction === `asc` ? 1 : -1 + return 0 + }) +} + +/** + * Filter array by predicate function + */ +export function filterBy( + array: Array, + predicate: (item: T) => boolean +): Array { + return array.filter(predicate) +} + +/** + * Paginate array + */ +export function paginate( + array: Array, + options: { limit?: number; offset?: number } = {} +): Array { + const { limit, offset = 0 } = options + + let result = array.slice(offset) + + if (limit !== undefined) { + result = result.slice(0, limit) + } + + return result +} diff --git a/packages/db-collection-e2e/support/global-setup.ts b/packages/db-collection-e2e/support/global-setup.ts new file mode 100644 index 000000000..4b61e3fcd --- /dev/null +++ b/packages/db-collection-e2e/support/global-setup.ts @@ -0,0 +1,152 @@ +import type { GlobalSetupContext } from "vitest/node" +import { Client, type ClientConfig } from "pg" + +const ELECTRIC_URL = process.env.ELECTRIC_URL ?? "http://localhost:3000" +const POSTGRES_HOST = process.env.POSTGRES_HOST ?? "localhost" +const POSTGRES_PORT = parseInt(process.env.POSTGRES_PORT ?? "54321") +const POSTGRES_USER = process.env.POSTGRES_USER ?? "postgres" +const POSTGRES_PASSWORD = process.env.POSTGRES_PASSWORD ?? "password" +const POSTGRES_DB = process.env.POSTGRES_DB ?? 
"e2e_test" +const TEST_SCHEMA = "e2e_test" + +// Module augmentation for type-safe context injection +declare module "vitest" { + export interface ProvidedContext { + baseUrl: string + testSchema: string + postgresConfig: { + host: string + port: number + user: string + password: string + database: string + } + } +} + +/** + * Create a PostgreSQL client with default e2e test configuration + */ +export function makePgClient(overrides: ClientConfig = {}): Client { + return new Client({ + host: POSTGRES_HOST, + port: POSTGRES_PORT, + user: POSTGRES_USER, + password: POSTGRES_PASSWORD, + database: POSTGRES_DB, + options: `-csearch_path=${TEST_SCHEMA}`, + ...overrides, + }) +} + +/** + * Wait for Electric server to be ready + */ +async function waitForElectric(url: string): Promise { + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + reject(new Error(`Timed out waiting for Electric to be active at ${url}`)) + }, 10000) + + const check = async (): Promise => { + try { + const res = await fetch(`${url}/v1/health`) + if (res.ok) { + const data = (await res.json()) as { status: string } + if (data.status === "active") { + clearTimeout(timeout) + return resolve() + } + } + setTimeout(() => void check(), 100) + } catch { + setTimeout(() => void check(), 100) + } + } + + void check() + }) +} + +/** + * Wait for Postgres to be ready + */ +async function waitForPostgres(client: Client): Promise { + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + reject(new Error("Timed out waiting for Postgres")) + }, 10000) + + const check = async (): Promise => { + try { + await client.connect() + clearTimeout(timeout) + return resolve() + } catch { + setTimeout(() => void check(), 100) + } + } + + void check() + }) +} + +/** + * Global setup for e2e test suite + * + * This runs once before all tests and: + * 1. Waits for Electric server to be healthy + * 2. Connects to Postgres + * 3. Creates test schema + * 4. 
Provides context to all tests + * 5. Returns cleanup function + */ +export default async function ({ provide }: GlobalSetupContext) { + console.log("🚀 Starting e2e test suite global setup...") + + // Wait for Electric server to be ready + console.log(`⏳ Waiting for Electric at ${ELECTRIC_URL}...`) + await waitForElectric(ELECTRIC_URL) + console.log("✓ Electric is ready") + + // Connect to Postgres + console.log( + `⏳ Connecting to Postgres at ${POSTGRES_HOST}:${POSTGRES_PORT}...` + ) + const client = makePgClient() + await waitForPostgres(client) + console.log("✓ Postgres is ready") + + // Create test schema + console.log(`⏳ Creating test schema: ${TEST_SCHEMA}...`) + await client.query(`CREATE SCHEMA IF NOT EXISTS ${TEST_SCHEMA}`) + console.log("✓ Test schema created") + + // Provide context values to all tests + provide("baseUrl", ELECTRIC_URL) + provide("testSchema", TEST_SCHEMA) + provide("postgresConfig", { + host: POSTGRES_HOST, + port: POSTGRES_PORT, + user: POSTGRES_USER, + password: POSTGRES_PASSWORD, + database: POSTGRES_DB, + }) + + console.log("✅ Global setup complete\n") + + // Return cleanup function (runs once after all tests) + return async () => { + console.log("\n🧹 Running global teardown...") + try { + await client.query(`DROP SCHEMA IF EXISTS ${TEST_SCHEMA} CASCADE`) + console.log("✓ Test schema dropped") + } catch (error) { + console.error("Error dropping test schema:", error) + } finally { + await client.end() + console.log("✓ Postgres connection closed") + console.log("✅ Global teardown complete") + } + } +} diff --git a/packages/db-collection-e2e/support/test-context.ts b/packages/db-collection-e2e/support/test-context.ts new file mode 100644 index 000000000..d9183bf09 --- /dev/null +++ b/packages/db-collection-e2e/support/test-context.ts @@ -0,0 +1,236 @@ +import { test, inject } from "vitest" +import { Client } from "pg" +import { makePgClient } from "./global-setup" +import type { SeedDataResult } from "../src/types" +import { 
generateSeedData } from "../src/fixtures/seed-data" + +/** + * Base fixture with database client and abort controller + */ +export const testWithDb = test.extend<{ + dbClient: Client + aborter: AbortController + baseUrl: string + testSchema: string + tableName: (base: string) => string +}>({ + dbClient: async ({}, use) => { + const schema = inject("testSchema") + const client = makePgClient({ + options: `-csearch_path=${schema}`, + }) + await client.connect() + + // Ensure schema is set + await client.query(`SET search_path TO ${schema}`) + + await use(client) + await client.end() + }, + + aborter: async ({}, use) => { + const controller = new AbortController() + await use(controller) + controller.abort("Test complete") + }, + + baseUrl: async ({}, use) => { + await use(inject("baseUrl")) + }, + + testSchema: async ({}, use) => { + await use(inject("testSchema")) + }, + + tableName: async ({ task }, use) => { + // Generate unique table names based on task ID and random suffix + await use((base: string) => { + const taskId = task.id.replace(/[^a-zA-Z0-9]/g, "_") + const random = Math.random().toString(16).slice(2, 8) + return `"${base}_${taskId}_${random}"` + }) + }, +}) + +/** + * Extended fixture with test tables (Users, Posts, Comments) + */ +export const testWithTables = testWithDb.extend<{ + usersTable: string + postsTable: string + commentsTable: string + dropTables: () => Promise +}>({ + usersTable: async ({ dbClient, tableName, task }, use) => { + const name = tableName("users") + const taskFile = task.file?.name.replace(/'/g, "`") ?? 
"unknown" + const taskName = task.name.replace(/'/g, "`") + + await dbClient.query(` + DROP TABLE IF EXISTS ${name}; + CREATE TABLE ${name} ( + id UUID PRIMARY KEY, + name TEXT NOT NULL, + email TEXT, + age INTEGER NOT NULL, + is_active BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + metadata JSONB, + deleted_at TIMESTAMP + ); + COMMENT ON TABLE ${name} IS 'Created for ${taskFile} - ${taskName}'; + `) + + await use(name) + + try { + await dbClient.query(`DROP TABLE IF EXISTS ${name}`) + } catch (error) { + console.error(`Error dropping table ${name}:`, error) + } + }, + + postsTable: async ({ dbClient, tableName, usersTable, task }, use) => { + const name = tableName("posts") + const taskFile = task.file?.name.replace(/'/g, "`") ?? "unknown" + const taskName = task.name.replace(/'/g, "`") + + await dbClient.query(` + DROP TABLE IF EXISTS ${name}; + CREATE TABLE ${name} ( + id UUID PRIMARY KEY, + user_id UUID NOT NULL, + title TEXT NOT NULL, + content TEXT, + view_count INTEGER NOT NULL DEFAULT 0, + published_at TIMESTAMP, + deleted_at TIMESTAMP + ); + COMMENT ON TABLE ${name} IS 'Created for ${taskFile} - ${taskName}'; + `) + + await use(name) + + try { + await dbClient.query(`DROP TABLE IF EXISTS ${name}`) + } catch (error) { + console.error(`Error dropping table ${name}:`, error) + } + }, + + commentsTable: async ( + { dbClient, tableName, postsTable, usersTable, task }, + use + ) => { + const name = tableName("comments") + const taskFile = task.file?.name.replace(/'/g, "`") ?? 
"unknown" + const taskName = task.name.replace(/'/g, "`") + + await dbClient.query(` + DROP TABLE IF EXISTS ${name}; + CREATE TABLE ${name} ( + id UUID PRIMARY KEY, + post_id UUID NOT NULL, + user_id UUID NOT NULL, + text TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT NOW(), + deleted_at TIMESTAMP + ); + COMMENT ON TABLE ${name} IS 'Created for ${taskFile} - ${taskName}'; + `) + + await use(name) + + try { + await dbClient.query(`DROP TABLE IF EXISTS ${name}`) + } catch (error) { + console.error(`Error dropping table ${name}:`, error) + } + }, + + dropTables: async ( + { dbClient, usersTable, postsTable, commentsTable }, + use + ) => { + await use(async () => { + try { + await dbClient.query(`DROP TABLE IF EXISTS ${commentsTable}`) + await dbClient.query(`DROP TABLE IF EXISTS ${postsTable}`) + await dbClient.query(`DROP TABLE IF EXISTS ${usersTable}`) + } catch (error) { + console.error("Error dropping tables:", error) + } + }) + }, +}) + +/** + * Extended fixture with seeded test data + */ +export const testWithSeedData = testWithTables.extend<{ + seedData: SeedDataResult + insertSeedData: () => Promise +}>({ + seedData: async ({}, use) => { + const seed = generateSeedData() + await use(seed) + }, + + insertSeedData: async ( + { dbClient, usersTable, postsTable, commentsTable, seedData }, + use + ) => { + await use(async () => { + // Insert users + for (const user of seedData.users) { + await dbClient.query( + `INSERT INTO ${usersTable} (id, name, email, age, is_active, created_at, metadata, deleted_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, + [ + user.id, + user.name, + user.email, + user.age, + user.isActive, + user.createdAt, + user.metadata ? 
JSON.stringify(user.metadata) : null, + user.deletedAt, + ] + ) + } + + // Insert posts + for (const post of seedData.posts) { + await dbClient.query( + `INSERT INTO ${postsTable} (id, user_id, title, content, view_count, published_at, deleted_at) + VALUES ($1, $2, $3, $4, $5, $6, $7)`, + [ + post.id, + post.userId, + post.title, + post.content, + post.viewCount, + post.publishedAt, + post.deletedAt, + ] + ) + } + + // Insert comments + for (const comment of seedData.comments) { + await dbClient.query( + `INSERT INTO ${commentsTable} (id, post_id, user_id, text, created_at, deleted_at) + VALUES ($1, $2, $3, $4, $5, $6)`, + [ + comment.id, + comment.postId, + comment.userId, + comment.text, + comment.createdAt, + comment.deletedAt, + ] + ) + } + }) + }, +}) diff --git a/packages/db-collection-e2e/tsconfig.json b/packages/db-collection-e2e/tsconfig.json new file mode 100644 index 000000000..7c90fdfe9 --- /dev/null +++ b/packages/db-collection-e2e/tsconfig.json @@ -0,0 +1,26 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "module": "ESNext", + "moduleResolution": "bundler", + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "types": ["vitest/globals", "node"], + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "allowSyntheticDefaultImports": true, + "paths": { + "@tanstack/db": ["../db/src"], + "@tanstack/db-ivm": ["../db-ivm/src"], + "@tanstack/electric-db-collection": ["../electric-db-collection"], + "@tanstack/query-db-collection": ["../query-db-collection"] + } + }, + "include": ["src/**/*", "support/**/*"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/db-collection-e2e/vite.config.ts b/packages/db-collection-e2e/vite.config.ts new file mode 100644 index 000000000..a231f935d --- /dev/null +++ b/packages/db-collection-e2e/vite.config.ts @@ -0,0 +1,15 @@ +import { defineConfig } from 
"vitest/config" + +export default defineConfig({ + test: { + globalSetup: "./support/global-setup.ts", + fileParallelism: false, // Critical: Serial execution for shared database + timeout: 30000, + environment: "jsdom", + coverage: { + provider: "istanbul", + reporter: ["text", "json", "html"], + include: ["**/src/**"], + }, + }, +}) diff --git a/packages/db/src/collection/index.ts b/packages/db/src/collection/index.ts index add2fc4d4..5616a8ceb 100644 --- a/packages/db/src/collection/index.ts +++ b/packages/db/src/collection/index.ts @@ -133,7 +133,7 @@ export function createCollection< TKey extends string | number = string | number, TUtils extends UtilsRecord = UtilsRecord, >( - options: CollectionConfig, TKey, T> & { + options: CollectionConfig, TKey, T, TUtils> & { schema: T utils?: TUtils } & NonSingleResult @@ -146,7 +146,7 @@ export function createCollection< TKey extends string | number = string | number, TUtils extends UtilsRecord = UtilsRecord, >( - options: CollectionConfig, TKey, T> & { + options: CollectionConfig, TKey, T, TUtils> & { schema: T utils?: TUtils } & SingleResult @@ -160,7 +160,7 @@ export function createCollection< TKey extends string | number = string | number, TUtils extends UtilsRecord = UtilsRecord, >( - options: CollectionConfig & { + options: CollectionConfig & { schema?: never // prohibit schema if an explicit type is provided utils?: TUtils } & NonSingleResult @@ -173,7 +173,7 @@ export function createCollection< TKey extends string | number = string | number, TUtils extends UtilsRecord = UtilsRecord, >( - options: CollectionConfig & { + options: CollectionConfig & { schema?: never // prohibit schema if an explicit type is provided utils?: TUtils } & SingleResult diff --git a/packages/db/src/local-storage.ts b/packages/db/src/local-storage.ts index e011902d7..50e204171 100644 --- a/packages/db/src/local-storage.ts +++ b/packages/db/src/local-storage.ts @@ -283,7 +283,12 @@ export function localStorageCollectionOptions< config: 
LocalStorageCollectionConfig, T, TKey> & { schema: T } -): CollectionConfig, TKey, T> & { +): CollectionConfig< + InferSchemaOutput, + TKey, + T, + LocalStorageCollectionUtils +> & { id: string utils: LocalStorageCollectionUtils schema: T @@ -298,7 +303,7 @@ export function localStorageCollectionOptions< config: LocalStorageCollectionConfig & { schema?: never // prohibit schema } -): CollectionConfig & { +): CollectionConfig & { id: string utils: LocalStorageCollectionUtils schema?: never // no schema in the result @@ -306,7 +311,10 @@ export function localStorageCollectionOptions< export function localStorageCollectionOptions( config: LocalStorageCollectionConfig -): Omit, `id`> & { +): Omit< + CollectionConfig, + `id` +> & { id: string utils: LocalStorageCollectionUtils schema?: StandardSchemaV1 diff --git a/packages/db/src/query/compiler/expressions.ts b/packages/db/src/query/compiler/expressions.ts index 821d9b708..1437786f4 100644 --- a/packages/db/src/query/compiler/expressions.ts +++ b/packages/db/src/query/compiler/expressions.ts @@ -15,6 +15,9 @@ export const SUPPORTED_COLLECTION_FUNCS = new Set([ `and`, `or`, `in`, + `isNull`, + `isUndefined`, + `not`, ]) /** diff --git a/packages/db/src/query/expression-helpers.ts b/packages/db/src/query/expression-helpers.ts index febcdf843..dd651f396 100644 --- a/packages/db/src/query/expression-helpers.ts +++ b/packages/db/src/query/expression-helpers.ts @@ -44,7 +44,7 @@ export type FieldPath = Array export interface SimpleComparison { field: FieldPath operator: string - value: any + value?: any // Optional for operators like isNull and isUndefined that don't have a value } /** @@ -304,19 +304,23 @@ export function parseOrderByExpression( * Extracts all simple comparisons from a WHERE expression. * This is useful for simple APIs that only support basic filters. * - * Note: This only works for simple AND-ed conditions. 
Throws an error if it encounters - * unsupported operations like OR, NOT, or complex nested expressions. + * Note: This only works for simple AND-ed conditions and NOT-wrapped comparisons. + * Throws an error if it encounters unsupported operations like OR or complex nested expressions. + * + * NOT operators are flattened by prefixing the operator name (e.g., `not(eq(...))` becomes `not_eq`). * * @param expr - The WHERE expression to parse * @returns Array of simple comparisons - * @throws Error if expression contains OR, NOT, or other unsupported operations + * @throws Error if expression contains OR or other unsupported operations * * @example * ```typescript * const comparisons = extractSimpleComparisons(where) * // Returns: [ * // { field: ['category'], operator: 'eq', value: 'electronics' }, - * // { field: ['price'], operator: 'lt', value: 100 } + * // { field: ['price'], operator: 'lt', value: 100 }, + * // { field: ['email'], operator: 'isNull' }, // No value for null checks + * // { field: ['status'], operator: 'not_eq', value: 'archived' } * // ] * ``` */ @@ -335,12 +339,65 @@ export function extractSimpleComparisons( return } + // Handle NOT - recurse into argument and prefix operator with 'not_' + if (e.name === `not`) { + const [arg] = e.args + if (!arg || arg.type !== `func`) { + throw new Error( + `extractSimpleComparisons requires a comparison or null check inside 'not' operator.` + ) + } + + // Handle NOT with null/undefined checks + const nullCheckOps = [`isNull`, `isUndefined`] + if (nullCheckOps.includes(arg.name)) { + const [fieldArg] = arg.args + const field = fieldArg?.type === `ref` ? 
fieldArg.path : null + + if (field) { + comparisons.push({ + field, + operator: `not_${arg.name}`, + // No value for null/undefined checks + }) + } else { + throw new Error( + `extractSimpleComparisons requires a field reference for '${arg.name}' operator.` + ) + } + return + } + + // Handle NOT with comparison operators + const comparisonOps = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`] + if (comparisonOps.includes(arg.name)) { + const [leftArg, rightArg] = arg.args + const field = leftArg?.type === `ref` ? leftArg.path : null + const value = rightArg?.type === `val` ? rightArg.value : null + + if (field && value !== undefined) { + comparisons.push({ + field, + operator: `not_${arg.name}`, + value, + }) + } else { + throw new Error( + `extractSimpleComparisons requires simple field-value comparisons. Found complex expression for 'not(${arg.name})' operator.` + ) + } + return + } + + // NOT can only wrap simple comparisons or null checks + throw new Error( + `extractSimpleComparisons does not support 'not(${arg.name})'. NOT can only wrap comparison operators (eq, gt, gte, lt, lte, in) or null checks (isNull, isUndefined).` + ) + } + // Throw on unsupported operations const unsupportedOps = [ `or`, - `not`, - `isNull`, - `isUndefined`, `like`, `ilike`, `upper`, @@ -361,6 +418,28 @@ export function extractSimpleComparisons( ) } + // Handle null/undefined check operators (single argument, no value) + const nullCheckOps = [`isNull`, `isUndefined`] + if (nullCheckOps.includes(e.name)) { + const [fieldArg] = e.args + + // Extract field (must be a ref) + const field = fieldArg?.type === `ref` ? 
fieldArg.path : null + + if (field) { + comparisons.push({ + field, + operator: e.name, + // No value for null/undefined checks + }) + } else { + throw new Error( + `extractSimpleComparisons requires a field reference for '${e.name}' operator.` + ) + } + return + } + // Handle comparison operators const comparisonOps = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`] if (comparisonOps.includes(e.name)) { diff --git a/packages/db/tests/query/expression-helpers.test.ts b/packages/db/tests/query/expression-helpers.test.ts index aab1f0949..33fcbbcac 100644 --- a/packages/db/tests/query/expression-helpers.test.ts +++ b/packages/db/tests/query/expression-helpers.test.ts @@ -432,6 +432,83 @@ describe(`Expression Helpers`, () => { }, ]) }) + + it(`should handle NOT with comparison operators`, () => { + const expr = new Func(`not`, [ + new Func(`eq`, [new PropRef([`category`]), new Value(`electronics`)]), + ]) + + const result = extractSimpleComparisons(expr) + + expect(result).toEqual([ + { field: [`category`], operator: `not_eq`, value: `electronics` }, + ]) + }) + + it(`should handle NOT with all comparison operators`, () => { + const expr = new Func(`and`, [ + new Func(`not`, [new Func(`eq`, [new PropRef([`a`]), new Value(1)])]), + new Func(`not`, [new Func(`gt`, [new PropRef([`b`]), new Value(2)])]), + new Func(`not`, [new Func(`lt`, [new PropRef([`c`]), new Value(3)])]), + new Func(`not`, [ + new Func(`in`, [new PropRef([`d`]), new Value([4, 5])]), + ]), + ]) + + const result = extractSimpleComparisons(expr) + + expect(result).toEqual([ + { field: [`a`], operator: `not_eq`, value: 1 }, + { field: [`b`], operator: `not_gt`, value: 2 }, + { field: [`c`], operator: `not_lt`, value: 3 }, + { field: [`d`], operator: `not_in`, value: [4, 5] }, + ]) + }) + + it(`should handle NOT with null checks`, () => { + const expr = new Func(`and`, [ + new Func(`not`, [new Func(`isNull`, [new PropRef([`email`])])]), + new Func(`not`, [new Func(`isUndefined`, [new PropRef([`name`])])]), + ]) + + 
const result = extractSimpleComparisons(expr) + + expect(result).toEqual([ + { field: [`email`], operator: `not_isNull` }, + { field: [`name`], operator: `not_isUndefined` }, + ]) + }) + + it(`should handle mixed NOT and regular comparisons`, () => { + const expr = new Func(`and`, [ + new Func(`eq`, [new PropRef([`status`]), new Value(`active`)]), + new Func(`not`, [ + new Func(`eq`, [new PropRef([`category`]), new Value(`archived`)]), + ]), + new Func(`gt`, [new PropRef([`age`]), new Value(18)]), + ]) + + const result = extractSimpleComparisons(expr) + + expect(result).toEqual([ + { field: [`status`], operator: `eq`, value: `active` }, + { field: [`category`], operator: `not_eq`, value: `archived` }, + { field: [`age`], operator: `gt`, value: 18 }, + ]) + }) + + it(`should throw on NOT wrapping AND/OR (complex)`, () => { + const expr = new Func(`not`, [ + new Func(`and`, [ + new Func(`eq`, [new PropRef([`a`]), new Value(1)]), + new Func(`eq`, [new PropRef([`b`]), new Value(2)]), + ]), + ]) + + expect(() => extractSimpleComparisons(expr)).toThrow( + `extractSimpleComparisons does not support 'not(and)'` + ) + }) }) describe(`parseLoadSubsetOptions`, () => { diff --git a/packages/electric-db-collection/e2e/electric.e2e.test.ts b/packages/electric-db-collection/e2e/electric.e2e.test.ts new file mode 100644 index 000000000..d3b8135af --- /dev/null +++ b/packages/electric-db-collection/e2e/electric.e2e.test.ts @@ -0,0 +1,461 @@ +/** + * Electric Collection E2E Tests + * + * end-to-end tests using actual Postgres + Electric sync + */ + +import { afterAll, afterEach, beforeAll, describe, inject } from "vitest" +import { createCollection } from "@tanstack/db" +import { electricCollectionOptions } from "../src/electric" +import { makePgClient } from "../../db-collection-e2e/support/global-setup" +import { + createCollationTestSuite, + createDeduplicationTestSuite, + createJoinsTestSuite, + createLiveUpdatesTestSuite, + createMutationsTestSuite, + createPaginationTestSuite, 
+ createPredicatesTestSuite, + generateSeedData, +} from "../../db-collection-e2e/src/index" +import { waitFor } from "../../db-collection-e2e/src/utils/helpers" +import type { E2ETestConfig } from "../../db-collection-e2e/src/types" +import type { Client } from "pg" + +describe(`Electric Collection E2E Tests`, () => { + let config: E2ETestConfig + let dbClient: Client + let usersTable: string + let postsTable: string + let commentsTable: string + + beforeAll(async () => { + const baseUrl = inject(`baseUrl`) + const testSchema = inject(`testSchema`) + const seedData = generateSeedData() + + // Create unique table names (quoted for Electric) + const testId = Date.now().toString(16) + usersTable = `"users_e2e_${testId}"` + postsTable = `"posts_e2e_${testId}"` + commentsTable = `"comments_e2e_${testId}"` + + // Connect to database + dbClient = makePgClient({ options: `-csearch_path=${testSchema}` }) + await dbClient.connect() + await dbClient.query(`SET search_path TO ${testSchema}`) + + // Create tables + await dbClient.query(` + CREATE TABLE ${usersTable} ( + id UUID PRIMARY KEY, + name TEXT NOT NULL, + email TEXT, + age INTEGER NOT NULL, + "isActive" BOOLEAN NOT NULL DEFAULT true, + "createdAt" TIMESTAMP NOT NULL DEFAULT NOW(), + metadata JSONB, + "deletedAt" TIMESTAMP + ) + `) + + await dbClient.query(` + CREATE TABLE ${postsTable} ( + id UUID PRIMARY KEY, + "userId" UUID NOT NULL, + title TEXT NOT NULL, + content TEXT, + "viewCount" INTEGER NOT NULL DEFAULT 0, + "publishedAt" TIMESTAMP, + "deletedAt" TIMESTAMP + ) + `) + + await dbClient.query(` + CREATE TABLE ${commentsTable} ( + id UUID PRIMARY KEY, + "postId" UUID NOT NULL, + "userId" UUID NOT NULL, + text TEXT NOT NULL, + "createdAt" TIMESTAMP NOT NULL DEFAULT NOW(), + "deletedAt" TIMESTAMP + ) + `) + + // Insert seed data + console.log(`Inserting ${seedData.users.length} users...`) + for (const user of seedData.users) { + await dbClient.query( + `INSERT INTO ${usersTable} (id, name, email, age, "isActive", 
"createdAt", metadata, "deletedAt") + VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, + [ + user.id, + user.name, + user.email, + user.age, + user.isActive, + user.createdAt, + user.metadata ? JSON.stringify(user.metadata) : null, + user.deletedAt, + ] + ) + } + console.log(`Inserted ${seedData.users.length} users successfully`) + + for (const post of seedData.posts) { + await dbClient.query( + `INSERT INTO ${postsTable} (id, "userId", title, content, "viewCount", "publishedAt", "deletedAt") + VALUES ($1, $2, $3, $4, $5, $6, $7)`, + [ + post.id, + post.userId, + post.title, + post.content, + post.viewCount, + post.publishedAt, + post.deletedAt, + ] + ) + } + + for (const comment of seedData.comments) { + await dbClient.query( + `INSERT INTO ${commentsTable} (id, "postId", "userId", text, "createdAt", "deletedAt") + VALUES ($1, $2, $3, $4, $5, $6)`, + [ + comment.id, + comment.postId, + comment.userId, + comment.text, + comment.createdAt, + comment.deletedAt, + ] + ) + } + + // Wait for Electric to see the data because Electric's logical replication + // slot may be lagging a bit behind so we need to ensure that Electric has seen the data + // before we start the tests otherwise the tests are faster than the replication slot + // and won't see any data. 
+ const tempUsersCollection = createCollection( + electricCollectionOptions({ + id: `temp-verify-users-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${usersTable}`, + }, + }, + syncMode: `eager`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const tempPostsCollection = createCollection( + electricCollectionOptions({ + id: `temp-verify-posts-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${postsTable}`, + }, + }, + syncMode: `eager`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const tempCommentsCollection = createCollection( + electricCollectionOptions({ + id: `temp-verify-comments-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${commentsTable}`, + }, + }, + syncMode: `eager`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + await Promise.all([ + tempUsersCollection.preload(), + tempPostsCollection.preload(), + tempCommentsCollection.preload(), + ]) + + await Promise.all([ + waitFor(() => tempUsersCollection.size >= seedData.users.length, { + timeout: 30000, + interval: 500, + message: `Electric replication has not processed WAL entries for users (got ${tempUsersCollection.size}/${seedData.users.length})`, + }), + waitFor(() => tempPostsCollection.size >= seedData.posts.length, { + timeout: 30000, + interval: 500, + message: `Electric replication has not processed WAL entries for posts (got ${tempPostsCollection.size}/${seedData.posts.length})`, + }), + waitFor(() => tempCommentsCollection.size >= seedData.comments.length, { + timeout: 30000, + interval: 500, + message: `Electric replication has not processed WAL entries for comments (got ${tempCommentsCollection.size}/${seedData.comments.length})`, + }), + ]) + + // Clean up the temporary collections + await Promise.all([ + tempUsersCollection.cleanup(), + tempPostsCollection.cleanup(), + 
tempCommentsCollection.cleanup(), + ]) + + // Create REAL Electric collections + const eagerUsers = createCollection( + electricCollectionOptions({ + id: `electric-e2e-users-eager-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${usersTable}`, + }, + }, + syncMode: `eager`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const eagerPosts = createCollection( + electricCollectionOptions({ + id: `electric-e2e-posts-eager-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${postsTable}`, + }, + }, + syncMode: `eager`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const eagerComments = createCollection( + electricCollectionOptions({ + id: `electric-e2e-comments-eager-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${commentsTable}`, + }, + }, + syncMode: `eager`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const onDemandUsers = createCollection( + electricCollectionOptions({ + id: `electric-e2e-users-ondemand-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${usersTable}`, + }, + }, + syncMode: `on-demand`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const onDemandPosts = createCollection( + electricCollectionOptions({ + id: `electric-e2e-posts-ondemand-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${postsTable}`, + }, + }, + syncMode: `on-demand`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + const onDemandComments = createCollection( + electricCollectionOptions({ + id: `electric-e2e-comments-ondemand-${testId}`, + shapeOptions: { + url: `${baseUrl}/v1/shape`, + params: { + table: `${testSchema}.${commentsTable}`, + }, + }, + syncMode: `on-demand`, + getKey: (item: any) => item.id, + startSync: true, + }) + ) + + // Wait for eager 
collections to sync all data + await eagerUsers.preload() + await eagerPosts.preload() + await eagerComments.preload() + + // Wait for on-demand collections to be ready (they start empty) + await onDemandUsers.preload() + await onDemandPosts.preload() + await onDemandComments.preload() + + config = { + collections: { + eager: { + users: eagerUsers as any, + posts: eagerPosts as any, + comments: eagerComments as any, + }, + onDemand: { + users: onDemandUsers as any, + posts: onDemandPosts as any, + comments: onDemandComments as any, + }, + }, + hasReplicationLag: true, // Electric has async replication lag + mutations: { + // Use direct SQL for Electric tests (simulates external changes) + // This tests that Electric sync picks up database changes + insertUser: async (user) => { + await dbClient.query( + `INSERT INTO ${usersTable} (id, name, email, age, "isActive", "createdAt", metadata, "deletedAt") + VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, + [ + user.id, + user.name, + user.email || null, + user.age, + user.isActive, + user.createdAt, + user.metadata ? 
JSON.stringify(user.metadata) : null, + user.deletedAt || null, + ] + ) + }, + updateUser: async (id, updates) => { + const setClauses: Array = [] + const values: Array = [] + let paramIndex = 1 + + if (updates.age !== undefined) { + setClauses.push(`age = $${paramIndex++}`) + values.push(updates.age) + } + if (updates.name !== undefined) { + setClauses.push(`name = $${paramIndex++}`) + values.push(updates.name) + } + if (updates.email !== undefined) { + setClauses.push(`email = $${paramIndex++}`) + values.push(updates.email) + } + if (updates.isActive !== undefined) { + setClauses.push(`"isActive" = $${paramIndex++}`) + values.push(updates.isActive) + } + + values.push(id) + await dbClient.query( + `UPDATE ${usersTable} SET ${setClauses.join(`, `)} WHERE id = $${paramIndex}`, + values + ) + }, + deleteUser: async (id) => { + await dbClient.query(`DELETE FROM ${usersTable} WHERE id = $1`, [id]) + }, + insertPost: async (post) => { + await dbClient.query( + `INSERT INTO ${postsTable} (id, "userId", title, content, "viewCount", "publishedAt", "deletedAt") + VALUES ($1, $2, $3, $4, $5, $6, $7)`, + [ + post.id, + post.userId, + post.title, + post.content || null, + post.viewCount, + post.publishedAt || null, + post.deletedAt || null, + ] + ) + }, + }, + setup: async () => {}, + afterEach: async () => { + // Clean up and restart on-demand collections + // This validates cleanup() works and each test starts fresh + await onDemandUsers.cleanup() + await onDemandPosts.cleanup() + await onDemandComments.cleanup() + + // Restart sync after cleanup + onDemandUsers.startSyncImmediate() + onDemandPosts.startSyncImmediate() + onDemandComments.startSyncImmediate() + + // Wait for collections to be ready + await onDemandUsers.preload() + await onDemandPosts.preload() + await onDemandComments.preload() + }, + teardown: async () => { + await Promise.all([ + eagerUsers.cleanup(), + eagerPosts.cleanup(), + eagerComments.cleanup(), + onDemandUsers.cleanup(), + onDemandPosts.cleanup(), 
+ onDemandComments.cleanup(), + ]) + }, + } + }, 60000) // 60 second timeout for setup + + afterEach(async () => { + if (config.afterEach) { + await config.afterEach() + } + }) + + afterAll(async () => { + await config.teardown() + + // Drop tables + try { + await dbClient.query(`DROP TABLE IF EXISTS ${commentsTable}`) + await dbClient.query(`DROP TABLE IF EXISTS ${postsTable}`) + await dbClient.query(`DROP TABLE IF EXISTS ${usersTable}`) + } catch (e) { + console.error(`Error dropping tables:`, e) + } + await dbClient.end() + }) + + // Helper to get config + function getConfig() { + return Promise.resolve(config) + } + + // Run all test suites + createPredicatesTestSuite(getConfig) + createPaginationTestSuite(getConfig) + createJoinsTestSuite(getConfig) + createDeduplicationTestSuite(getConfig) + createCollationTestSuite(getConfig) + createMutationsTestSuite(getConfig) + createLiveUpdatesTestSuite(getConfig) +}) diff --git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 331588704..04f285386 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -11,7 +11,9 @@ }, "devDependencies": { "@types/debug": "^4.1.12", - "@vitest/coverage-istanbul": "^3.2.4" + "@types/pg": "^8.11.10", + "@vitest/coverage-istanbul": "^3.2.4", + "pg": "^8.13.1" }, "exports": { ".": { @@ -51,7 +53,8 @@ "build": "vite build", "dev": "vite build --watch", "lint": "eslint . 
--fix", - "test": "npx vitest --run" + "test": "npx vitest run", + "test:e2e": "npx vitest run --config vitest.e2e.config.ts" }, "sideEffects": false, "type": "module", diff --git a/packages/electric-db-collection/src/electric.ts b/packages/electric-db-collection/src/electric.ts index 22403dbdf..b52ed24ab 100644 --- a/packages/electric-db-collection/src/electric.ts +++ b/packages/electric-db-collection/src/electric.ts @@ -889,6 +889,8 @@ function createElectricSync>( unsubscribeStream() // Abort the abort controller to stop the stream abortController.abort() + // Reset deduplication tracking so collection can load fresh data if restarted + loadSubsetDedupe?.reset() }, } }, diff --git a/packages/electric-db-collection/src/pg-serializer.ts b/packages/electric-db-collection/src/pg-serializer.ts index 707c4e1b8..e19f74f6a 100644 --- a/packages/electric-db-collection/src/pg-serializer.ts +++ b/packages/electric-db-collection/src/pg-serializer.ts @@ -1,26 +1,57 @@ +/** + * Serialize values for Electric SQL subset parameters. + * + * IMPORTANT: Electric expects RAW values, NOT SQL-formatted literals. + * Electric handles all type casting and escaping on the server side. + * The params Record contains the actual values as strings, + * and Electric will parse/cast them based on the column type in the WHERE clause. 
+ * + * @param value - The value to serialize + * @returns The raw value as a string (no SQL formatting/quoting) + */ export function serialize(value: unknown): string { + // Handle null/undefined - return empty string + // Electric interprets empty string as NULL in typed column context + if (value === null || value === undefined) { + return `` + } + + // Handle strings - return as-is (NO quotes, Electric handles escaping) if (typeof value === `string`) { - return `'${value}'` + return value } + // Handle numbers - convert to string if (typeof value === `number`) { return value.toString() } - if (value === null || value === undefined) { - return `NULL` - } - + // Handle booleans - return as lowercase string if (typeof value === `boolean`) { return value ? `true` : `false` } + // Handle dates - return ISO format (NO quotes) if (value instanceof Date) { - return `'${value.toISOString()}'` + return value.toISOString() } + // Handle arrays - for = ANY() operator, serialize as Postgres array literal + // Format: {val1,val2,val3} with proper escaping if (Array.isArray(value)) { - return `ARRAY[${value.map(serialize).join(`,`)}]` + // Postgres array literal format uses curly braces + const elements = value.map((item) => { + if (item === null || item === undefined) { + return `NULL` + } + if (typeof item === `string`) { + // Escape quotes and backslashes for Postgres array literals + const escaped = item.replace(/\\/g, `\\\\`).replace(/"/g, `\\"`) + return `"${escaped}"` + } + return serialize(item) + }) + return `{${elements.join(`,`)}}` } throw new Error(`Cannot serialize value: ${JSON.stringify(value)}`) diff --git a/packages/electric-db-collection/src/sql-compiler.ts b/packages/electric-db-collection/src/sql-compiler.ts index 078840da3..df1eb9b14 100644 --- a/packages/electric-db-collection/src/sql-compiler.ts +++ b/packages/electric-db-collection/src/sql-compiler.ts @@ -26,11 +26,23 @@ export function compileSQL(options: LoadSubsetOptions): SubsetParams { 
compiledSQL.limit = limit } + // WORKAROUND for Electric bug: Empty subset requests don't load data + // Add dummy "true = true" predicate when there's no where clause + // This is always true so doesn't filter data, just tricks Electric into loading + if (!where) { + compiledSQL.where = `true = true` + } + // Serialize the values in the params array into PG formatted strings // and transform the array into a Record const paramsRecord = params.reduce( (acc, param, index) => { - acc[`${index + 1}`] = serialize(param) + const serialized = serialize(param) + // Only include non-empty values in params + // Empty strings from null/undefined should be omitted + if (serialized !== ``) { + acc[`${index + 1}`] = serialized + } return acc }, {} as Record @@ -42,6 +54,16 @@ export function compileSQL(options: LoadSubsetOptions): SubsetParams { } } +/** + * Quote PostgreSQL identifiers to handle mixed case column names correctly. + * Electric/Postgres requires quotes for case-sensitive identifiers. + * @param name - The identifier to quote + * @returns The quoted identifier + */ +function quoteIdentifier(name: string): string { + return `"${name}"` +} + /** * Compiles the expression to a SQL string and mutates the params array with the values. * @param exp - The expression to compile @@ -63,7 +85,7 @@ function compileBasicExpression( `Compiler can't handle nested properties: ${exp.path.join(`.`)}` ) } - return exp.path[0]! + return quoteIdentifier(exp.path[0]!) 
case `func`: return compileFunction(exp, params) default: @@ -114,11 +136,46 @@ function compileFunction( compileBasicExpression(arg, params) ) + // Special case for IS NULL / IS NOT NULL - these are postfix operators + if (name === `isNull` || name === `isUndefined`) { + if (compiledArgs.length !== 1) { + throw new Error(`${name} expects 1 argument`) + } + return `${compiledArgs[0]} ${opName}` + } + + // Special case for NOT - unary prefix operator + if (name === `not`) { + if (compiledArgs.length !== 1) { + throw new Error(`NOT expects 1 argument`) + } + // Check if the argument is IS NULL to generate IS NOT NULL + const arg = args[0] + if (arg && arg.type === `func`) { + const funcArg = arg + if (funcArg.name === `isNull` || funcArg.name === `isUndefined`) { + const innerArg = compileBasicExpression(funcArg.args[0]!, params) + return `${innerArg} IS NOT NULL` + } + } + return `${opName} (${compiledArgs[0]})` + } + if (isBinaryOp(name)) { + // Special handling for AND/OR which can be variadic + if ((name === `and` || name === `or`) && compiledArgs.length > 2) { + // Chain multiple arguments: (a AND b AND c) or (a OR b OR c) + return compiledArgs.map((arg) => `(${arg})`).join(` ${opName} `) + } + if (compiledArgs.length !== 2) { throw new Error(`Binary operator ${name} expects 2 arguments`) } const [lhs, rhs] = compiledArgs + // Special case for = ANY operator which needs parentheses around the array parameter + if (name === `in`) { + return `${lhs} ${opName}(${rhs})` + } return `${lhs} ${opName} ${rhs}` } @@ -126,7 +183,7 @@ function compileFunction( } function isBinaryOp(name: string): boolean { - const binaryOps = [`eq`, `gt`, `gte`, `lt`, `lte`, `and`, `or`] + const binaryOps = [`eq`, `gt`, `gte`, `lt`, `lte`, `and`, `or`, `in`] return binaryOps.includes(name) } @@ -143,7 +200,7 @@ function getOpName(name: string): string { not: `NOT`, isUndefined: `IS NULL`, isNull: `IS NULL`, - in: `IN`, + in: `= ANY`, // Use = ANY syntax for array parameters like: `LIKE`, 
ilike: `ILIKE`, upper: `UPPER`, diff --git a/packages/electric-db-collection/tests/electric-live-query.test.ts b/packages/electric-db-collection/tests/electric-live-query.test.ts index b8047e8cf..5f269099a 100644 --- a/packages/electric-db-collection/tests/electric-live-query.test.ts +++ b/packages/electric-db-collection/tests/electric-live-query.test.ts @@ -536,8 +536,8 @@ describe.each([ mockRequestSnapshot.mock.calls[index]?.[0] expect(callArgs(0)).toMatchObject({ params: { "1": `true` }, - where: `active = $1`, - orderBy: `age NULLS FIRST`, + where: `"active" = $1`, + orderBy: `"age" NULLS FIRST`, limit: 2, }) @@ -600,16 +600,16 @@ describe.each([ // Check that first it requested a limit of 2 users (from first query) expect(callArgs(0)).toMatchObject({ params: { "1": `true` }, - where: `active = $1`, - orderBy: `age NULLS FIRST`, + where: `"active" = $1`, + orderBy: `"age" NULLS FIRST`, limit: 2, }) // Check that second it requested a limit of 6 users (from second query) expect(callArgs(1)).toMatchObject({ params: { "1": `true` }, - where: `active = $1`, - orderBy: `age NULLS FIRST`, + where: `"active" = $1`, + orderBy: `"age" NULLS FIRST`, limit: 6, }) @@ -718,8 +718,8 @@ describe(`Electric Collection with Live Query - syncMode integration`, () => { expect(mockRequestSnapshot).toHaveBeenCalledWith( expect.objectContaining({ limit: 5, // Requests full limit from Electric - orderBy: `age NULLS FIRST`, - where: `active = $1`, + orderBy: `"age" NULLS FIRST`, + where: `"active" = $1`, params: { 1: `true` }, // Parameters are stringified }) ) @@ -786,7 +786,7 @@ describe(`Electric Collection with Live Query - syncMode integration`, () => { expect(mockRequestSnapshot).toHaveBeenCalledWith( expect.objectContaining({ limit: 3, // Requests full limit from Electric - orderBy: `id NULLS FIRST`, + orderBy: `"id" NULLS FIRST`, params: {}, }) ) @@ -861,7 +861,7 @@ describe(`Electric Collection with Live Query - syncMode integration`, () => { 
expect(mockRequestSnapshot).toHaveBeenCalledWith( expect.objectContaining({ limit: 3, - orderBy: `age NULLS FIRST`, + orderBy: `"age" NULLS FIRST`, }) ) @@ -896,9 +896,9 @@ describe(`Electric Collection with Live Query - syncMode integration`, () => { // Should have requested snapshot with WHERE clause expect(mockRequestSnapshot).toHaveBeenCalledWith( expect.objectContaining({ - where: `active = $1`, + where: `"active" = $1`, params: { "1": `true` }, - orderBy: `name DESC NULLS FIRST`, + orderBy: `"name" DESC NULLS FIRST`, limit: 10, }) ) @@ -939,9 +939,9 @@ describe(`Electric Collection with Live Query - syncMode integration`, () => { // Should have requested snapshot with complex WHERE clause expect(mockRequestSnapshot).toHaveBeenCalledWith( expect.objectContaining({ - where: `age > $1`, + where: `"age" > $1`, params: { "1": `20` }, - orderBy: `age NULLS FIRST`, + orderBy: `"age" NULLS FIRST`, limit: 5, }) ) @@ -1045,9 +1045,9 @@ describe(`Electric Collection - loadSubset deduplication`, () => { expect(mockRequestSnapshot).toHaveBeenCalledTimes(1) expect(mockRequestSnapshot).toHaveBeenCalledWith( expect.objectContaining({ - where: `active = $1`, + where: `"active" = $1`, params: { "1": `true` }, - orderBy: `age NULLS FIRST`, + orderBy: `"age" NULLS FIRST`, limit: 10, }) ) diff --git a/packages/electric-db-collection/tsconfig.json b/packages/electric-db-collection/tsconfig.json index fc6368937..8b2e086d3 100644 --- a/packages/electric-db-collection/tsconfig.json +++ b/packages/electric-db-collection/tsconfig.json @@ -14,9 +14,10 @@ "paths": { "@tanstack/store": ["../store/src"], "@tanstack/db-ivm": ["../db-ivm/src"], - "@tanstack/db": ["../db/src"] + "@tanstack/db": ["../db/src"], + "@tanstack/db-collection-e2e": ["../db-collection-e2e/src"] } }, - "include": ["src", "tests", "vite.config.ts"], + "include": ["src", "tests", "e2e", "vite.config.ts", "vitest.e2e.config.ts"], "exclude": ["node_modules", "dist"] } diff --git 
a/packages/electric-db-collection/vite.config.ts b/packages/electric-db-collection/vite.config.ts index c7968f28a..b174faf80 100644 --- a/packages/electric-db-collection/vite.config.ts +++ b/packages/electric-db-collection/vite.config.ts @@ -5,10 +5,14 @@ import packageJson from "./package.json" const config = defineConfig({ test: { name: packageJson.name, - dir: `./tests`, + include: [`tests/**/*.test.ts`], + exclude: [`e2e/**/*`], environment: `jsdom`, coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, - typecheck: { enabled: true }, + typecheck: { + enabled: true, + include: [`tests/**/*.test.ts`, `tests/**/*.test-d.ts`], + }, }, }) diff --git a/packages/electric-db-collection/vitest.e2e.config.ts b/packages/electric-db-collection/vitest.e2e.config.ts new file mode 100644 index 000000000..16afb1170 --- /dev/null +++ b/packages/electric-db-collection/vitest.e2e.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from "vitest/config" + +export default defineConfig({ + test: { + include: [`e2e/**/*.e2e.test.ts`], + globalSetup: `../db-collection-e2e/support/global-setup.ts`, + fileParallelism: false, // Critical for shared database + testTimeout: 30000, + environment: `jsdom`, + }, +}) diff --git a/packages/query-db-collection/e2e/query-filter.ts b/packages/query-db-collection/e2e/query-filter.ts new file mode 100644 index 000000000..ae45e9ac7 --- /dev/null +++ b/packages/query-db-collection/e2e/query-filter.ts @@ -0,0 +1,517 @@ +/** + * Query-driven sync implementation for Query collection E2E tests + * Uses expression helpers to implement proper predicate push-down + */ + +import { parseLoadSubsetOptions } from "@tanstack/db" +import type { + IR, + LoadSubsetOptions, + ParsedOrderBy, + SimpleComparison, +} from "@tanstack/db" + +const DEBUG_VERBOSE = process.env.DEBUG_QUERY_PUSH === `1` +const DEBUG_SUMMARY = + DEBUG_VERBOSE || process.env.DEBUG_QUERY_PUSH_SUMMARY === `1` +const SIMPLE_OPERATORS = new Set([ + `eq`, + `gt`, + `gte`, + `lt`, 
+ `lte`, + `in`, + `isNull`, + `isUndefined`, + // NOT-wrapped operators (flattened by extractSimpleComparisons) + `not_eq`, + `not_gt`, + `not_gte`, + `not_lt`, + `not_lte`, + `not_in`, + `not_isNull`, + `not_isUndefined`, +]) + +/** + * Build a stable TanStack Query key for load subset options + */ +export function buildQueryKey( + namespace: string, + options: LoadSubsetOptions | undefined +) { + return [`e2e`, namespace, serializeLoadSubsetOptions(options)] +} + +export function serializeLoadSubsetOptions( + options: LoadSubsetOptions | undefined +): unknown { + if (!options) { + return null + } + + const result: Record = {} + + if (options.where) { + result.where = serializeExpression(options.where) + } + + if (options.orderBy?.length) { + result.orderBy = options.orderBy.map((clause) => ({ + expression: serializeExpression(clause.expression), + direction: clause.compareOptions.direction, + nulls: clause.compareOptions.nulls, + })) + } + + if (options.limit !== undefined) { + result.limit = options.limit + } + + return JSON.stringify(Object.keys(result).length === 0 ? 
null : result) +} + +function serializeExpression(expr: IR.BasicExpression | undefined): unknown { + if (!expr) { + return null + } + + switch (expr.type) { + case `val`: + return { + type: `val`, + value: serializeValue(expr.value), + } + case `ref`: + return { + type: `ref`, + path: [...expr.path], + } + case `func`: + return { + type: `func`, + name: expr.name, + args: expr.args.map((arg) => serializeExpression(arg)), + } + default: + return null + } +} + +function serializeValue(value: unknown): unknown { + if (value === undefined) { + return { __type: `undefined` } + } + + if (typeof value === `number`) { + if (Number.isNaN(value)) { + return { __type: `nan` } + } + if (value === Number.POSITIVE_INFINITY) { + return { __type: `infinity`, sign: 1 } + } + if (value === Number.NEGATIVE_INFINITY) { + return { __type: `infinity`, sign: -1 } + } + } + + if ( + value === null || + typeof value === `string` || + typeof value === `number` || + typeof value === `boolean` + ) { + return value + } + + if (value instanceof Date) { + return { __type: `date`, value: value.toJSON() } + } + + if (Array.isArray(value)) { + return value.map((item) => serializeValue(item)) + } + + if (typeof value === `object`) { + return Object.fromEntries( + Object.entries(value as Record).map(([key, val]) => [ + key, + serializeValue(val), + ]) + ) + } + + return value +} + +type Predicate = (item: T) => boolean + +function isBasicExpression( + expr: IR.BasicExpression | null | undefined +): expr is IR.BasicExpression { + return expr != null +} + +/** + * Apply LoadSubsetOptions to data (filter, sort, limit) + */ +export function applyPredicates( + data: Array, + options: LoadSubsetOptions | undefined +): Array { + if (!options) return data + + const { filters, sorts, limit } = parseLoadSubsetOptions(options) + if (DEBUG_SUMMARY) { + const { limit: rawLimit, where, orderBy } = options + const analysis = analyzeExpression(where) + console.log(`[query-filter] loadSubsetOptions`, { + hasWhere: 
Boolean(where), + whereType: where?.type, + whereName: where?.type === `func` ? (where as IR.Func).name : undefined, + expressionSummary: analysis, + hasOrderBy: Boolean(orderBy), + limit: rawLimit, + filtersCount: filters.length, + sortsCount: sorts.length, + initialSize: data.length, + }) + } + + let result = [...data] + + // Apply WHERE filtering + const predicate = buildFilterPredicate(options.where, filters) + if (predicate) { + result = result.filter(predicate) + if (DEBUG_SUMMARY) { + console.log(`[query-filter] after where`, { + size: result.length, + }) + } + } + + // Apply ORDER BY + if (sorts.length > 0) { + result.sort((a, b) => compareBySorts(a, b, sorts)) + if (DEBUG_SUMMARY) { + console.log(`[query-filter] after orderBy`, { + size: result.length, + }) + } + } + + // Apply LIMIT + // Note: offset is NOT applied here - it's handled by the live query windowing layer + // The limit passed here already accounts for offset (e.g., offset(20).limit(10) -> limit: 30) + if (limit !== undefined) { + result = result.slice(0, limit) + if (DEBUG_SUMMARY) { + console.log(`[query-filter] after limit`, { + size: result.length, + limit, + }) + } + } + + return result +} + +/** + * Build a predicate function from expression tree + */ +function buildFilterPredicate( + where: IR.BasicExpression | undefined, + filters: Array +): Predicate | undefined { + if (!where) { + return undefined + } + + if (filters.length > 0 && isSimpleExpression(where)) { + return buildSimplePredicate(filters) + } + + try { + return buildExpressionPredicate(where) + } catch (error) { + if (DEBUG_SUMMARY) { + console.warn(`[query-filter] failed to build expression predicate`, error) + } + return undefined + } +} + +function buildSimplePredicate( + filters: Array +): Predicate { + return (item: T) => + filters.every((comparison) => evaluateSimpleComparison(comparison, item)) +} + +function evaluateSimpleComparison( + comparison: SimpleComparison, + item: T +): boolean { + const actualValue = 
getFieldValue(item, comparison.field) + const expectedValue = comparison.value + + switch (comparison.operator) { + case `eq`: + return actualValue === expectedValue + case `gt`: + return actualValue > expectedValue + case `gte`: + return actualValue >= expectedValue + case `lt`: + return actualValue < expectedValue + case `lte`: + return actualValue <= expectedValue + case `in`: + return Array.isArray(expectedValue) + ? expectedValue.includes(actualValue) + : false + case `isNull`: + return actualValue === null + case `isUndefined`: + return actualValue === undefined + // NOT-wrapped operators (flattened) + case `not_eq`: + return actualValue !== expectedValue + case `not_gt`: + return !(actualValue > expectedValue) + case `not_gte`: + return !(actualValue >= expectedValue) + case `not_lt`: + return !(actualValue < expectedValue) + case `not_lte`: + return !(actualValue <= expectedValue) + case `not_in`: + return Array.isArray(expectedValue) + ? !expectedValue.includes(actualValue) + : true + case `not_isNull`: + return actualValue !== null + case `not_isUndefined`: + return actualValue !== undefined + default: + throw new Error( + `Unsupported simple comparison operator: ${comparison.operator}` + ) + } +} + +function isSimpleExpression(expr: IR.BasicExpression): boolean { + if (expr.type !== `func`) { + return false + } + + if (expr.name === `and`) { + return expr.args.every( + (arg): arg is IR.BasicExpression => + Boolean(arg) && arg.type === `func` && isSimpleExpression(arg) + ) + } + + // Handle NOT wrapping simple expressions + if (expr.name === `not`) { + const [arg] = expr.args + if (!arg || arg.type !== `func`) { + return false + } + // NOT can wrap comparison operators or null checks + return isSimpleExpression(arg) + } + + if (!SIMPLE_OPERATORS.has(expr.name)) { + return false + } + + // Null/undefined checks take a single ref argument + if (expr.name === `isNull` || expr.name === `isUndefined`) { + const [fieldArg] = expr.args + return fieldArg?.type 
=== `ref` + } + + // Comparison operators take ref and val arguments + const [leftArg, rightArg] = expr.args + return leftArg?.type === `ref` && rightArg?.type === `val` +} + +function buildExpressionPredicate( + expr: IR.BasicExpression +): Predicate { + return (item: T) => Boolean(evaluateExpression(expr, item)) +} + +function analyzeExpression(expr: IR.BasicExpression | undefined): + | { + hasIsNull: boolean + hasIsUndefined: boolean + hasEqNull: boolean + rootName?: string + } + | undefined { + if (!expr) return undefined + + const summary = { + hasIsNull: false, + hasIsUndefined: false, + hasEqNull: false, + rootName: expr.type === `func` ? expr.name : undefined, + } + + function walk(node: IR.BasicExpression): void { + if (node.type === `func`) { + if (node.name === `isNull`) summary.hasIsNull = true + if (node.name === `isUndefined`) summary.hasIsUndefined = true + + if (node.name === `eq`) { + const right = node.args[1] + if (right?.type === `val` && right.value === null) { + summary.hasEqNull = true + } + } + + node.args.filter(isBasicExpression).forEach((child) => walk(child)) + } + } + + walk(expr) + return summary +} + +function evaluateExpression(expr: IR.BasicExpression, item: T): any { + switch (expr.type) { + case `val`: + return expr.value + case `ref`: + return getFieldValue(item, expr.path) + case `func`: { + const args = expr.args.map((arg) => evaluateExpression(arg, item)) + return evaluateFunction(expr.name, args) + } + default: + return undefined + } +} + +function evaluateFunction(name: string, args: Array): any { + if (DEBUG_VERBOSE) { + console.log(`[query-filter] operator=${name}`, args) + } + switch (name) { + case `eq`: + return args[0] === args[1] + case `neq`: + case `ne`: + case `notEq`: + return args[0] !== args[1] + case `gt`: + return args[0] > args[1] + case `gte`: + return args[0] >= args[1] + case `lt`: + return args[0] < args[1] + case `lte`: + return args[0] <= args[1] + case `and`: + return args.every(Boolean) + case `or`: + 
return args.some(Boolean) + case `not`: + return !args[0] + case `in`: + case `inArray`: + return Array.isArray(args[1]) ? args[1].includes(args[0]) : false + case `isNull`: + return args[0] === null + case `isNotNull`: + return args[0] !== null + case `isUndefined`: + return args[0] === undefined + case `isNotUndefined`: + return args[0] !== undefined + default: + throw new Error(`Unsupported predicate operator: ${name}`) + } +} + +function compareBySorts(a: T, b: T, sorts: Array): number { + for (const sort of sorts) { + const aVal = getFieldValue(a, sort.field) + const bVal = getFieldValue(b, sort.field) + + const result = compareValues(aVal, bVal, sort.direction, sort.nulls) + if (result !== 0) { + return result + } + } + + return 0 +} + +function compareValues( + a: any, + b: any, + direction: `asc` | `desc`, + nulls?: `first` | `last` +): number { + const aNull = a === null || a === undefined + const bNull = b === null || b === undefined + + if (aNull || bNull) { + if (aNull && bNull) return 0 + if (nulls === `first`) { + return aNull ? -1 : 1 + } + if (nulls === `last`) { + return aNull ? 1 : -1 + } + // Default SQL behavior: treat nulls as lowest for ASC, highest for DESC + if (direction === `asc`) { + return aNull ? -1 : 1 + } + return aNull ? 1 : -1 + } + + if (a < b) return direction === `asc` ? -1 : 1 + if (a > b) return direction === `asc` ? 
1 : -1 + return 0 +} + +/** + * Get nested field value from object + */ +function getFieldValue(obj: any, fieldPath: Array): any { + if (fieldPath.length === 0) { + return undefined + } + + let path = fieldPath + + if ( + path.length > 0 && + obj && + typeof path[0] === `string` && + !(path[0] in obj) + ) { + path = path.slice(1) + } + + if (path.length === 0) { + if (DEBUG_VERBOSE) { + console.log(`[query-filter] getFieldValue alias`, fieldPath, `->`, obj) + } + return obj + } + + const value = path.reduce((current, key) => current?.[key], obj) + + if (DEBUG_VERBOSE) { + console.log(`[query-filter] getFieldValue`, fieldPath, `->`, value) + } + + return value +} diff --git a/packages/query-db-collection/e2e/query.e2e.test.ts b/packages/query-db-collection/e2e/query.e2e.test.ts new file mode 100644 index 000000000..c93b532c0 --- /dev/null +++ b/packages/query-db-collection/e2e/query.e2e.test.ts @@ -0,0 +1,249 @@ +/** + * Query Collection E2E Tests + * + * Tests using Query collections with mock backend + */ + +import { afterAll, afterEach, beforeAll, describe } from "vitest" +import { createCollection } from "@tanstack/db" +import { QueryClient } from "@tanstack/query-core" +import { queryCollectionOptions } from "../src/query" +import { + createCollationTestSuite, + createDeduplicationTestSuite, + createJoinsTestSuite, + createLiveUpdatesTestSuite, + createMutationsTestSuite, + createPaginationTestSuite, + createPredicatesTestSuite, + generateSeedData, +} from "../../db-collection-e2e/src/index" +import { applyPredicates, buildQueryKey } from "./query-filter" +import type { LoadSubsetOptions } from "@tanstack/db" +import type { + Comment as E2EComment, + Post as E2EPost, + E2ETestConfig, + User as E2EUser, +} from "../../db-collection-e2e/src/types" + +describe(`Query Collection E2E Tests`, () => { + let config: E2ETestConfig + let queryClient: QueryClient + + beforeAll(async () => { + // Make seed data mutable so mutations can modify it + const seedData = 
generateSeedData() + + queryClient = new QueryClient({ + defaultOptions: { + queries: { + staleTime: 0, + retry: false, + }, + }, + }) + + // Create REAL Query collections with mock backend queryFn + const eagerUsers = createCollection( + queryCollectionOptions({ + id: `query-e2e-users-eager`, + queryClient, + queryKey: [`e2e`, `users`, `eager`], + queryFn: () => { + // Mock query function that returns seed data + return Promise.resolve(seedData.users) + }, + getKey: (item: E2EUser) => item.id, + startSync: true, + }) + ) + + const eagerPosts = createCollection( + queryCollectionOptions({ + id: `query-e2e-posts-eager`, + queryClient, + queryKey: [`e2e`, `posts`, `eager`], + queryFn: () => { + return Promise.resolve(seedData.posts) + }, + getKey: (item: E2EPost) => item.id, + startSync: true, + }) + ) + + const eagerComments = createCollection( + queryCollectionOptions({ + id: `query-e2e-comments-eager`, + queryClient, + queryKey: [`e2e`, `comments`, `eager`], + queryFn: () => { + return Promise.resolve(seedData.comments) + }, + getKey: (item: E2EComment) => item.id, + startSync: true, + }) + ) + + const onDemandUsers = createCollection( + queryCollectionOptions({ + id: `query-e2e-users-ondemand`, + queryClient, + // Function-based queryKey that derives a serializable key from predicate options + // This ensures different predicates create separate TanStack Query observers + queryKey: (opts) => buildQueryKey(`users`, opts), + syncMode: `on-demand`, + queryFn: (ctx) => { + const options = ctx.meta?.loadSubsetOptions as + | LoadSubsetOptions + | undefined + const filtered = applyPredicates(seedData.users, options) + return Promise.resolve(filtered) + }, + getKey: (item: E2EUser) => item.id, + startSync: false, + }) + ) + + const onDemandPosts = createCollection( + queryCollectionOptions({ + id: `query-e2e-posts-ondemand`, + queryClient, + queryKey: (opts) => buildQueryKey(`posts`, opts), + syncMode: `on-demand`, + queryFn: (ctx) => { + const options = 
ctx.meta?.loadSubsetOptions as + | LoadSubsetOptions + | undefined + const filtered = applyPredicates(seedData.posts, options) + return Promise.resolve(filtered) + }, + getKey: (item: E2EPost) => item.id, + startSync: false, + }) + ) + + const onDemandComments = createCollection( + queryCollectionOptions({ + id: `query-e2e-comments-ondemand`, + queryClient, + queryKey: (opts) => buildQueryKey(`comments`, opts), + syncMode: `on-demand`, + queryFn: (ctx) => { + const options = ctx.meta?.loadSubsetOptions as + | LoadSubsetOptions + | undefined + const filtered = applyPredicates(seedData.comments, options) + return Promise.resolve(filtered) + }, + getKey: (item: E2EComment) => item.id, + startSync: false, + }) + ) + + // Wait for eager collections to load + await eagerUsers.preload() + await eagerPosts.preload() + await eagerComments.preload() + + // On-demand collections don't start automatically + await onDemandUsers.preload() + await onDemandPosts.preload() + await onDemandComments.preload() + + config = { + collections: { + eager: { + users: eagerUsers as any, + posts: eagerPosts as any, + comments: eagerComments as any, + }, + onDemand: { + users: onDemandUsers as any, + posts: onDemandPosts as any, + comments: onDemandComments as any, + }, + }, + // Mutations for Query collections - modify seed data and invalidate queries + mutations: { + insertUser: async (user) => { + seedData.users.push(user) + await queryClient.invalidateQueries({ queryKey: [`e2e`, `users`] }) + }, + updateUser: async (id, updates) => { + const userIndex = seedData.users.findIndex((u) => u.id === id) + if (userIndex !== -1) { + seedData.users[userIndex] = { + ...seedData.users[userIndex]!, + ...updates, + } + await queryClient.invalidateQueries({ queryKey: [`e2e`, `users`] }) + } + }, + deleteUser: async (id) => { + const userIndex = seedData.users.findIndex((u) => u.id === id) + if (userIndex !== -1) { + seedData.users.splice(userIndex, 1) + await queryClient.invalidateQueries({ queryKey: 
[`e2e`, `users`] }) + } + }, + insertPost: async (post) => { + seedData.posts.push(post) + await queryClient.invalidateQueries({ queryKey: [`e2e`, `posts`] }) + }, + }, + setup: async () => {}, + afterEach: async () => { + // Clean up and restart on-demand collections + // This validates cleanup() works and each test starts fresh + await onDemandUsers.cleanup() + await onDemandPosts.cleanup() + await onDemandComments.cleanup() + + // Restart sync after cleanup + onDemandUsers.startSyncImmediate() + onDemandPosts.startSyncImmediate() + onDemandComments.startSyncImmediate() + + // Wait for collections to be ready + await onDemandUsers.preload() + await onDemandPosts.preload() + await onDemandComments.preload() + }, + teardown: async () => { + await Promise.all([ + eagerUsers.cleanup(), + eagerPosts.cleanup(), + eagerComments.cleanup(), + onDemandUsers.cleanup(), + onDemandPosts.cleanup(), + onDemandComments.cleanup(), + ]) + queryClient.clear() + }, + } + }) + + afterEach(async () => { + if (config.afterEach) { + await config.afterEach() + } + }) + + afterAll(async () => { + await config.teardown() + }) + + function getConfig() { + return Promise.resolve(config) + } + + // Run all test suites + createPredicatesTestSuite(getConfig) + createPaginationTestSuite(getConfig) + createJoinsTestSuite(getConfig) + createDeduplicationTestSuite(getConfig) + createCollationTestSuite(getConfig) + createMutationsTestSuite(getConfig) + createLiveUpdatesTestSuite(getConfig) +}) diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 02f00b973..ae2f124e8 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -53,7 +53,8 @@ "build": "vite build", "dev": "vite build --watch", "lint": "eslint . 
--fix", - "test": "npx vitest --run" + "test": "npx vitest run", + "test:e2e": "npx vitest run --config vitest.e2e.config.ts" }, "sideEffects": false, "type": "module", diff --git a/packages/query-db-collection/tests/query.test.ts b/packages/query-db-collection/tests/query.test.ts index 57dbd87ae..2f218d73a 100644 --- a/packages/query-db-collection/tests/query.test.ts +++ b/packages/query-db-collection/tests/query.test.ts @@ -9,13 +9,13 @@ import { import { queryCollectionOptions } from "../src/query" import type { QueryFunctionContext } from "@tanstack/query-core" import type { - CollectionImpl, + Collection, DeleteMutationFnParams, InsertMutationFnParams, TransactionWithMutations, UpdateMutationFnParams, } from "@tanstack/db" -import type { QueryCollectionConfig } from "../src/query" +import type { QueryCollectionConfig, QueryCollectionUtils } from "../src/query" interface TestItem { id: string @@ -620,21 +620,45 @@ describe(`QueryCollection`, () => { mutations: [] as any, } as TransactionWithMutations - const insertMockParams: InsertMutationFnParams = { + const mockCollection = { + utils: {} as QueryCollectionUtils< + TestItem, + string | number, + TestItem, + unknown + >, + } as unknown as Collection< + TestItem, + string | number, + QueryCollectionUtils, + never, + TestItem + > + + const insertMockParams = { transaction: insertTransaction, - // @ts-ignore not testing this - collection: {} as CollectionImpl, - } - const updateMockParams: UpdateMutationFnParams = { + collection: mockCollection, + } as InsertMutationFnParams< + TestItem, + string | number, + QueryCollectionUtils + > + const updateMockParams = { transaction: updateTransaction, - // @ts-ignore not testing this - collection: {} as CollectionImpl, - } - const deleteMockParams: DeleteMutationFnParams = { + collection: mockCollection, + } as UpdateMutationFnParams< + TestItem, + string | number, + QueryCollectionUtils + > + const deleteMockParams = { transaction: deleteTransaction, - // @ts-ignore not 
testing this - collection: {} as CollectionImpl, - } + collection: mockCollection, + } as DeleteMutationFnParams< + TestItem, + string | number, + QueryCollectionUtils + > // Create handlers const onInsert = vi.fn().mockResolvedValue(undefined) @@ -672,12 +696,6 @@ describe(`QueryCollection`, () => { mutations: [] as any, } as TransactionWithMutations - const insertMockParams: InsertMutationFnParams = { - transaction: insertTransaction, - // @ts-ignore not testing this - collection: {} as CollectionImpl, - } - // Create handlers with different return values const onInsertDefault = vi.fn().mockResolvedValue(undefined) // Default behavior should refetch const onInsertFalse = vi.fn().mockResolvedValue({ refetch: false }) // No refetch @@ -722,10 +740,19 @@ describe(`QueryCollection`, () => { // Clear initial call queryFnDefault.mockClear() - await optionsDefault.onInsert!(insertMockParams) + const insertParamsDefault = { + transaction: insertTransaction, + collection: collectionDefault, + } satisfies InsertMutationFnParams< + TestItem, + string | number, + QueryCollectionUtils + > + + await optionsDefault.onInsert!(insertParamsDefault) // Verify handler was called and refetch was triggered (queryFn called again) - expect(onInsertDefault).toHaveBeenCalledWith(insertMockParams) + expect(onInsertDefault).toHaveBeenCalledWith(insertParamsDefault) await vi.waitFor(() => { expect(queryFnDefault).toHaveBeenCalledTimes(1) }) @@ -742,10 +769,19 @@ describe(`QueryCollection`, () => { // Clear initial call queryFnFalse.mockClear() - await optionsFalse.onInsert!(insertMockParams) + const insertParamsFalse = { + transaction: insertTransaction, + collection: collectionFalse, + } satisfies InsertMutationFnParams< + TestItem, + string | number, + QueryCollectionUtils + > + + await optionsFalse.onInsert!(insertParamsFalse) // Verify handler was called but refetch was NOT triggered (queryFn not called) - expect(onInsertFalse).toHaveBeenCalledWith(insertMockParams) + 
expect(onInsertFalse).toHaveBeenCalledWith(insertParamsFalse) // Wait a bit to ensure no refetch happens await new Promise((resolve) => setTimeout(resolve, 50)) expect(queryFnFalse).not.toHaveBeenCalled() @@ -2913,8 +2949,6 @@ describe(`QueryCollection`, () => { const loadSubsetOptions = meta?.loadSubsetOptions ?? {} const { where } = loadSubsetOptions - console.log(`In queryFn:\n`, JSON.stringify(where, null, 2)) - // Query 1: items 1, 2, 3 (where: { category: 'A' }) if (isCategory(`A`, where)) { console.log(`Is category A`) diff --git a/packages/query-db-collection/tsconfig.json b/packages/query-db-collection/tsconfig.json index 623d4bd91..3d3c48bdd 100644 --- a/packages/query-db-collection/tsconfig.json +++ b/packages/query-db-collection/tsconfig.json @@ -14,9 +14,10 @@ "paths": { "@tanstack/store": ["../store/src"], "@tanstack/db": ["../db/src"], - "@tanstack/db-ivm": ["../db-ivm/src"] + "@tanstack/db-ivm": ["../db-ivm/src"], + "@tanstack/db-collection-e2e": ["../db-collection-e2e/src"] } }, - "include": ["src", "tests", "vite.config.ts"], + "include": ["src", "tests", "e2e", "vite.config.ts", "vitest.e2e.config.ts"], "exclude": ["node_modules", "dist"] } diff --git a/packages/query-db-collection/vite.config.ts b/packages/query-db-collection/vite.config.ts index c7968f28a..b174faf80 100644 --- a/packages/query-db-collection/vite.config.ts +++ b/packages/query-db-collection/vite.config.ts @@ -5,10 +5,14 @@ import packageJson from "./package.json" const config = defineConfig({ test: { name: packageJson.name, - dir: `./tests`, + include: [`tests/**/*.test.ts`], + exclude: [`e2e/**/*`], environment: `jsdom`, coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, - typecheck: { enabled: true }, + typecheck: { + enabled: true, + include: [`tests/**/*.test.ts`, `tests/**/*.test-d.ts`], + }, }, }) diff --git a/packages/query-db-collection/vitest.e2e.config.ts b/packages/query-db-collection/vitest.e2e.config.ts new file mode 100644 index 
000000000..81e8f894b --- /dev/null +++ b/packages/query-db-collection/vitest.e2e.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from "vitest/config" + +export default defineConfig({ + test: { + include: [`e2e/**/*.e2e.test.ts`], + fileParallelism: false, + testTimeout: 30000, + environment: `jsdom`, + }, +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 154e14613..c7ccb1116 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -124,7 +124,7 @@ importers: version: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) zod: specifier: ^3.25.76 version: 3.25.76 @@ -167,7 +167,7 @@ importers: devDependencies: '@angular/build': specifier: ^20.3.7 - version: 20.3.7(@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3))(@angular/compiler@20.3.7)(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.1)(karma@6.4.4)(lightningcss@1.30.2)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(yaml@2.8.1) + version: 
20.3.7(@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3))(@angular/compiler@20.3.7)(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.1)(karma@6.4.4)(lightningcss@1.30.2)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4)(yaml@2.8.1) '@angular/cli': specifier: ^20.3.7 version: 20.3.7(@types/node@24.7.0)(chokidar@4.0.3) @@ -446,7 +446,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) web-vitals: specifier: ^5.1.0 version: 5.1.0 @@ -701,7 +701,7 @@ importers: version: 19.2.15(@angular/common@19.2.15(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/compiler@19.2.15)(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@19.2.15(@angular/common@19.2.15(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) rxjs: specifier: ^7.8.2 version: 7.8.2 @@ -726,7 +726,7 @@ importers: devDependencies: '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) arktype: specifier: ^2.1.23 version: 2.1.23 @@ -737,6 +737,37 @@ importers: specifier: ^0.3.0 version: 0.3.0 + packages/db-collection-e2e: + dependencies: + '@tanstack/db': + specifier: workspace:* + version: link:../db + '@tanstack/electric-db-collection': + specifier: workspace:* + version: link:../electric-db-collection + '@tanstack/query-db-collection': + specifier: workspace:* + version: link:../query-db-collection + pg: + specifier: ^8.13.1 + version: 8.16.3 + devDependencies: + '@types/pg': + specifier: ^8.11.10 + version: 8.15.5 + '@vitest/ui': + specifier: ^3.2.4 + version: 3.2.4(vitest@3.2.4) + typescript: + specifier: ^5.9.2 + version: 5.9.3 + vite: + specifier: ^7.1.12 + version: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitest: + specifier: ^3.2.4 + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + packages/db-ivm: dependencies: fractional-indexing: @@ -754,7 +785,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) packages/electric-db-collection: dependencies: @@ -777,9 +808,15 @@ importers: '@types/debug': specifier: ^4.1.12 version: 4.1.12 + '@types/pg': + specifier: ^8.11.10 + version: 8.15.5 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) + pg: + specifier: ^8.13.1 + version: 8.16.3 packages/offline-transactions: devDependencies: @@ -797,7 +834,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@20.19.24)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@20.19.24)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) packages/powersync-db-collection: dependencies: @@ -828,7 +865,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) packages/query-db-collection: dependencies: @@ -847,7 +884,7 @@ importers: version: 5.90.5 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) packages/react-db: dependencies: @@ -875,7 +912,7 @@ importers: version: 1.5.0 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) react: specifier: ^19.2.0 version: 19.2.0 @@ -912,7 +949,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) packages/solid-db: dependencies: @@ -931,7 +968,7 @@ importers: version: 0.8.10(solid-js@1.9.9) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) jsdom: specifier: ^27.0.1 version: 27.0.1(postcss@8.5.6) @@ -943,7 +980,7 @@ importers: version: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) packages/svelte-db: dependencies: @@ -959,7 +996,7 @@ importers: version: 6.2.1(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) publint: specifier: ^0.3.15 version: 0.3.15 @@ -996,7 +1033,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) packages/vue-db: dependencies: @@ -1012,7 +1049,7 @@ importers: version: 6.0.1(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3)) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4) vue: specifier: ^3.5.22 version: 3.5.22(typescript@5.9.3) @@ -3007,6 +3044,9 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + '@polka/url@1.0.0-next.29': + resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + '@powersync/common@1.41.0': resolution: {integrity: sha512-N5Tpp2QU0RnP9eEDu+/Ok/IccSk8sNxnCBQgByEHg+DAHvSeSpIFe4nDSZwaRJrcltKi/FMzzjSIBjFrtsUz/g==} @@ -4322,6 +4362,11 @@ packages: '@vitest/spy@3.2.4': resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==} + '@vitest/ui@3.2.4': + resolution: {integrity: sha512-hGISOaP18plkzbWEcP/QvtRW1xDXF2+96HbEX6byqQhAUbiS5oH6/9JwW+QsQCIYON2bI6QZBF+2PvOmrRZ9wA==} + peerDependencies: + vitest: 3.2.4 + '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} @@ -5666,6 +5711,9 @@ packages: fetchdts@0.1.7: resolution: {integrity: sha512-YoZjBdafyLIop9lSxXVI33oLD5kN31q4Td+CasofLLYeLXRFeOsuOw0Uo+XNRi9PZlbfdlN2GmRtm4tCEQ9/KA==} + fflate@0.8.2: + resolution: {integrity: 
sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} + file-entry-cache@6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} engines: {node: ^10.12.0 || >=12.0.0} @@ -7892,6 +7940,10 @@ packages: simple-peer@9.11.1: resolution: {integrity: sha512-D1SaWpOW8afq1CZGWB8xTfrT3FekjQmPValrqncJMX7QFl8YwhrPTZvMCANLtgBwwdS+7zURyqxDDEmY558tTw==} + sirv@3.0.2: + resolution: {integrity: sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==} + engines: {node: '>=18'} + slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -8257,6 +8309,10 @@ packages: resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} engines: {node: '>=0.6'} + totalist@3.0.1: + resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} + engines: {node: '>=6'} + tough-cookie@6.0.0: resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==} engines: {node: '>=16'} @@ -9072,7 +9128,7 @@ snapshots: transitivePeerDependencies: - chokidar - 
'@angular/build@20.3.7(@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3))(@angular/compiler@20.3.7)(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.1)(karma@6.4.4)(lightningcss@1.30.2)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(yaml@2.8.1)': + '@angular/build@20.3.7(@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3))(@angular/compiler@20.3.7)(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.1)(karma@6.4.4)(lightningcss@1.30.2)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4)(yaml@2.8.1)': dependencies: '@ampproject/remapping': 2.3.0 '@angular-devkit/architect': 0.2003.7(chokidar@4.0.3) @@ -9111,7 +9167,7 @@ snapshots: lmdb: 3.4.2 postcss: 8.5.6 tailwindcss: 3.4.18 - vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@types/node' - chokidar 
@@ -11047,6 +11103,8 @@ snapshots: '@pkgr/core@0.2.9': {} + '@polka/url@1.0.0-next.29': {} + '@powersync/common@1.41.0': dependencies: js-logger: 1.6.1 @@ -12740,7 +12798,7 @@ snapshots: vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) vue: 3.5.22(typescript@5.9.3) - '@vitest/coverage-istanbul@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@vitest/coverage-istanbul@3.2.4(vitest@3.2.4)': dependencies: '@istanbuljs/schema': 0.1.3 debug: 4.4.3 @@ -12752,7 +12810,7 @@ snapshots: magicast: 0.3.5 test-exclude: 7.0.1 tinyrainbow: 2.0.0 - vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color @@ -12800,6 +12858,17 @@ snapshots: dependencies: tinyspy: 4.0.3 + '@vitest/ui@3.2.4(vitest@3.2.4)': + dependencies: + '@vitest/utils': 3.2.4 + fflate: 0.8.2 + flatted: 3.3.3 + pathe: 2.0.3 + sirv: 3.0.2 + tinyglobby: 0.2.15 + tinyrainbow: 2.0.0 + vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + '@vitest/utils@3.2.4': dependencies: '@vitest/pretty-format': 3.2.4 @@ -14435,6 +14504,8 @@ snapshots: fetchdts@0.1.7: {} + fflate@0.8.2: {} + file-entry-cache@6.0.1: dependencies: flat-cache: 3.2.0 @@ -16891,6 +16962,12 @@ snapshots: transitivePeerDependencies: - supports-color + sirv@3.0.2: + dependencies: + '@polka/url': 1.0.0-next.29 + mrmime: 2.0.1 + totalist: 3.0.1 + slash@3.0.0: {} slice-ansi@5.0.0: @@ 
-17326,6 +17403,8 @@ snapshots: toidentifier@1.0.1: {} + totalist@3.0.1: {} + tough-cookie@6.0.0: dependencies: tldts: 7.0.16 @@ -17824,7 +17903,7 @@ snapshots: optionalDependencies: vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitest@3.2.4(@types/debug@4.1.12)(@types/node@20.19.24)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@20.19.24)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: '@types/chai': 5.2.2 '@vitest/expect': 3.2.4 @@ -17852,6 +17931,7 @@ snapshots: optionalDependencies: '@types/debug': 4.1.12 '@types/node': 20.19.24 + '@vitest/ui': 3.2.4(vitest@3.2.4) jsdom: 27.0.1(postcss@8.5.6) transitivePeerDependencies: - jiti @@ -17867,7 +17947,7 @@ snapshots: - tsx - yaml - vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: '@types/chai': 5.2.2 '@vitest/expect': 3.2.4 @@ -17895,6 +17975,7 @@ snapshots: optionalDependencies: '@types/debug': 4.1.12 '@types/node': 24.7.0 + '@vitest/ui': 3.2.4(vitest@3.2.4) jsdom: 27.0.1(postcss@8.5.6) transitivePeerDependencies: - jiti