diff --git a/.changeset/code-contracts-live-query.md b/.changeset/code-contracts-live-query.md new file mode 100644 index 000000000..cf20d86e7 --- /dev/null +++ b/.changeset/code-contracts-live-query.md @@ -0,0 +1,16 @@ +--- +'@tanstack/db': patch +--- + +Add code contracts for live query D2 multiplicity invariant. + +Introduces a contract-based approach to ensure correctness in the live query system, inspired by Cheng Huang's article on AI-assisted development. Contracts verify runtime invariants during development/testing and can be disabled in production. + +Key additions: + +- `contracts.ts` with `precondition()`, `postcondition()`, and `invariant()` utilities +- D2 multiplicity contracts in `CollectionSubscriber.sendChangesToPipeline()` ensuring no duplicate keys are sent to the incremental view maintenance pipeline +- 16 contract verification tests covering multiplicity, tracking, and consistency +- 9 property-based tests using fast-check to explore edge cases with random operation sequences + +Contracts are automatically disabled when `NODE_ENV=production` for zero runtime overhead in production builds. diff --git a/packages/db/package.json b/packages/db/package.json index 2e8f236d0..e01f8d02c 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -55,6 +55,7 @@ "@tanstack/config": "^0.22.2", "@vitest/coverage-istanbul": "^3.2.4", "arktype": "^2.1.29", + "fast-check": "^4.5.3", "mitt": "^3.0.1", "superjson": "^2.2.6", "temporal-polyfill": "^0.3.0" diff --git a/packages/db/src/contracts.ts b/packages/db/src/contracts.ts new file mode 100644 index 000000000..abc0cd328 --- /dev/null +++ b/packages/db/src/contracts.ts @@ -0,0 +1,200 @@ +/** + * Contract utilities for runtime verification of preconditions, postconditions, and invariants. + * + * Inspired by Design by Contract (DbC) principles and Cheng Huang's approach to AI-assisted + * code verification. 
Contracts serve as executable specifications that: + * - Document expected behavior + * - Catch violations early during development/testing + * - Can be disabled in production for performance + * + * @example + * ```typescript + * function divide(a: number, b: number): number { + * precondition(b !== 0, 'divisor must be non-zero') + * const result = a / b + * postcondition(Number.isFinite(result), 'result must be finite') + * return result + * } + * ``` + */ + +// Contract checking is enabled by default, can be disabled via environment variable +// In production builds, bundlers can tree-shake contract checks when this is false +const CONTRACTS_ENABLED = + typeof process !== `undefined` + ? process.env.NODE_ENV !== `production` && + process.env.DISABLE_CONTRACTS !== `1` + : true + +/** + * Base class for all contract violation errors. + * Extends TanStackDBError for consistent error handling. + */ +export class ContractViolationError extends Error { + constructor( + public readonly violationType: + | `precondition` + | `postcondition` + | `invariant`, + message: string, + ) { + super( + `${violationType.charAt(0).toUpperCase() + violationType.slice(1)} violation: ${message}`, + ) + this.name = `ContractViolationError` + } +} + +/** + * Thrown when a precondition check fails. + * Preconditions define what must be true before a function executes. + */ +export class PreconditionViolationError extends ContractViolationError { + constructor(message: string) { + super(`precondition`, message) + this.name = `PreconditionViolationError` + } +} + +/** + * Thrown when a postcondition check fails. + * Postconditions define what must be true after a function executes. + */ +export class PostconditionViolationError extends ContractViolationError { + constructor(message: string) { + super(`postcondition`, message) + this.name = `PostconditionViolationError` + } +} + +/** + * Thrown when an invariant check fails. 
+ * Invariants define what must always be true for an object/system. + */ +export class InvariantViolationError extends ContractViolationError { + constructor(message: string) { + super(`invariant`, message) + this.name = `InvariantViolationError` + } +} + +/** + * Asserts a precondition that must be true before function execution. + * Use at the beginning of functions to validate inputs and state. + * + * @param condition - Boolean or function returning boolean to check + * @param message - Error message if condition is false + * @throws {PreconditionViolationError} if condition is false + * + * @example + * ```typescript + * function withdraw(amount: number) { + * precondition(amount > 0, 'amount must be positive') + * precondition(this.balance >= amount, 'insufficient balance') + * // ... + * } + * ``` + */ +export function precondition( + condition: boolean | (() => boolean), + message: string, +): asserts condition { + if (!CONTRACTS_ENABLED) return + + const result = typeof condition === `function` ? condition() : condition + if (!result) { + throw new PreconditionViolationError(message) + } +} + +/** + * Asserts a postcondition that must be true after function execution. + * Use at the end of functions to validate outputs and final state. + * + * @param condition - Boolean or function returning boolean to check + * @param message - Error message if condition is false + * @throws {PostconditionViolationError} if condition is false + * + * @example + * ```typescript + * function sort(arr: number[]): number[] { + * const result = [...arr].sort((a, b) => a - b) + * postcondition(result.length === arr.length, 'length preserved') + * postcondition(isSorted(result), 'result is sorted') + * return result + * } + * ``` + */ +export function postcondition( + condition: boolean | (() => boolean), + message: string, +): asserts condition { + if (!CONTRACTS_ENABLED) return + + const result = typeof condition === `function` ? 
condition() : condition + if (!result) { + throw new PostconditionViolationError(message) + } +} + +/** + * Asserts an invariant that must always be true. + * Use to verify system-wide or object-wide consistency. + * + * @param condition - Boolean or function returning boolean to check + * @param message - Error message if condition is false + * @throws {InvariantViolationError} if condition is false + * + * @example + * ```typescript + * class BinaryTree { + * insert(value: number) { + * // ... insertion logic ... + * invariant(this.isBalanced(), 'tree must remain balanced') + * } + * } + * ``` + */ +export function invariant( + condition: boolean | (() => boolean), + message: string, +): asserts condition { + if (!CONTRACTS_ENABLED) return + + const result = typeof condition === `function` ? condition() : condition + if (!result) { + throw new InvariantViolationError(message) + } +} + +/** + * Helper to check if contracts are currently enabled. + * Useful for conditional contract logic or testing. + */ +export function areContractsEnabled(): boolean { + return CONTRACTS_ENABLED +} + +/** + * Captures a value before an operation for use in postcondition checks. + * Returns undefined when contracts are disabled to avoid computation overhead. 
+ *
+ * @param getValue - Function that computes the value to capture
+ * @returns The captured value, or undefined if contracts are disabled
+ *
+ * @example
+ * ```typescript
+ * function increment(counter: Counter) {
+ *   const oldValue = captureForPostcondition(() => counter.value)
+ *   counter.value++
+ *   postcondition(
+ *     oldValue === undefined || counter.value === oldValue + 1,
+ *     'value incremented by exactly 1'
+ *   )
+ * }
+ * ```
+ */
+export function captureForPostcondition<T>(getValue: () => T): T | undefined {
+  if (!CONTRACTS_ENABLED) return undefined
+  return getValue()
+}
diff --git a/packages/db/src/query/live/collection-subscriber.ts b/packages/db/src/query/live/collection-subscriber.ts
index 303c833fc..0a7128270 100644
--- a/packages/db/src/query/live/collection-subscriber.ts
+++ b/packages/db/src/query/live/collection-subscriber.ts
@@ -3,6 +3,11 @@ import {
   normalizeExpressionPaths,
   normalizeOrderByPaths,
 } from '../compiler/expressions.js'
+import {
+  captureForPostcondition,
+  invariant,
+  postcondition,
+} from '../../contracts.js'
 import type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'
 import type { Collection } from '../../collection/index.js'
 import type { ChangeMessage } from '../../types.js'
@@ -130,15 +135,32 @@
     return subscription
   }
 
+  /**
+   * Sends filtered changes to the D2 pipeline, ensuring multiplicity invariants.
+ * + * @contract + * @invariant D2 multiplicity === 1 for all visible items (no duplicates) + * @invariant sentToD2Keys accurately tracks all keys currently in D2 + * @postcondition All inserts in filteredChanges have unique keys + * @postcondition For each insert: key is added to sentToD2Keys + * @postcondition For each delete: key is removed from sentToD2Keys + */ private sendChangesToPipeline( changes: Iterable>, callback?: () => boolean, ) { + // Capture state before filtering for postcondition verification + const sentKeysBefore = captureForPostcondition( + () => new Set(this.sentToD2Keys), + ) + // Filter changes to prevent duplicate inserts to D2 pipeline. // This ensures D2 multiplicity stays at 1 for visible items, so deletes // properly reduce multiplicity to 0 (triggering DELETE output). const changesArray = Array.isArray(changes) ? changes : [...changes] const filteredChanges: Array> = [] + const insertKeysInBatch = new Set() + for (const change of changesArray) { if (change.type === `insert`) { if (this.sentToD2Keys.has(change.key)) { @@ -146,6 +168,7 @@ export class CollectionSubscriber< continue } this.sentToD2Keys.add(change.key) + insertKeysInBatch.add(change.key) } else if (change.type === `delete`) { // Remove from tracking so future re-inserts are allowed this.sentToD2Keys.delete(change.key) @@ -154,6 +177,34 @@ export class CollectionSubscriber< filteredChanges.push(change) } + // Contract: Verify no duplicate insert keys in filtered output + // This invariant ensures D2 multiplicity stays at 1 + const insertKeys = filteredChanges + .filter((c) => c.type === `insert`) + .map((c) => c.key) + const uniqueInsertKeys = new Set(insertKeys) + invariant( + insertKeys.length === uniqueInsertKeys.size, + `D2 multiplicity invariant violated: duplicate insert keys detected in batch: [${insertKeys.join(`, `)}]`, + ) + + // Contract: Verify sentToD2Keys state is consistent + if (sentKeysBefore !== undefined) { + for (const change of filteredChanges) { + if 
(change.type === `insert`) { + postcondition( + this.sentToD2Keys.has(change.key), + `sentToD2Keys must contain key ${change.key} after insert`, + ) + } else if (change.type === `delete`) { + postcondition( + !this.sentToD2Keys.has(change.key), + `sentToD2Keys must not contain key ${change.key} after delete`, + ) + } + } + } + // currentSyncState and input are always defined when this method is called // (only called from active subscriptions during a sync session) const input = diff --git a/packages/db/tests/query/live-query-contracts.test.ts b/packages/db/tests/query/live-query-contracts.test.ts new file mode 100644 index 000000000..3ff638939 --- /dev/null +++ b/packages/db/tests/query/live-query-contracts.test.ts @@ -0,0 +1,494 @@ +/** + * Contract verification tests for the live query system. + * + * These tests verify the contracts (preconditions, postconditions, invariants) + * defined in the live query implementation. Based on the code contracts pattern + * from Cheng Huang's article on AI-assisted development. + * + * Key contracts tested: + * 1. D2 Multiplicity Invariant - Each key has multiplicity exactly 1 in D2 + * 2. SentToD2Keys Tracking - Accurately tracks keys sent to the D2 pipeline + * 3. 
Insert/Delete Consistency - Inserts add to tracking, deletes remove
+ */
+
+import { describe, expect, it } from 'vitest'
+import { createCollection } from '../../src/collection/index.js'
+import { createLiveQueryCollection, eq } from '../../src/query/index.js'
+import {
+  InvariantViolationError,
+  PostconditionViolationError,
+} from '../../src/contracts.js'
+import { mockSyncCollectionOptions } from '../utils.js'
+
+// Sample types for tests
+type Item = {
+  id: number
+  name: string
+  category: string
+}
+
+const sampleItems: Array<Item> = [
+  { id: 1, name: `Item 1`, category: `A` },
+  { id: 2, name: `Item 2`, category: `A` },
+  { id: 3, name: `Item 3`, category: `B` },
+]
+
+function createItemsCollection(initialData: Array<Item> = sampleItems) {
+  return createCollection(
+    mockSyncCollectionOptions<Item>({
+      id: `test-items`,
+      getKey: (item) => item.id,
+      initialData,
+    }),
+  )
+}
+
+describe(`Live Query Contracts`, () => {
+  describe(`D2 Multiplicity Invariant`, () => {
+    /**
+     * Contract: D2 multiplicity === 1 for all visible items
+     * Each key should appear exactly once in the D2 pipeline output.
+ */ + + it(`maintains multiplicity of 1 for initial inserts`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // All items should be present exactly once + expect(liveQuery.size).toBe(3) + expect(liveQuery.get(1)).toBeDefined() + expect(liveQuery.get(2)).toBeDefined() + expect(liveQuery.get(3)).toBeDefined() + }) + + it(`maintains multiplicity of 1 after update (no duplicates)`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Update an item - should not create duplicate + source.utils.begin() + source.utils.write({ + type: `update`, + value: { id: 1, name: `Updated Item 1`, category: `A` }, + }) + source.utils.commit() + + // Should still have exactly 3 items (no duplicate from update) + expect(liveQuery.size).toBe(3) + expect(liveQuery.get(1)?.name).toBe(`Updated Item 1`) + }) + + it(`maintains multiplicity of 1 after delete and re-insert`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Delete an item + source.utils.begin() + source.utils.write({ + type: `delete`, + value: { id: 1, name: `Item 1`, category: `A` }, + }) + source.utils.commit() + + expect(liveQuery.size).toBe(2) + expect(liveQuery.get(1)).toBeUndefined() + + // Re-insert the same item + source.utils.begin() + source.utils.write({ + type: `insert`, + value: { id: 1, name: `Item 1 Reinserted`, category: `A` }, + }) + source.utils.commit() + + // Should have 3 items, with the new value + expect(liveQuery.size).toBe(3) + expect(liveQuery.get(1)?.name).toBe(`Item 1 Reinserted`) + }) + + it(`filters duplicate inserts to maintain multiplicity via sentToD2Keys`, async () => { + // This test verifies the sentToD2Keys tracking 
prevents duplicates + // at the D2 pipeline level. The source collection also has duplicate + // protection, so we test the invariant by verifying consistent state + // after updates (which internally are delete+insert in D2). + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Multiple rapid updates to same key - internally these become + // delete+insert pairs. The sentToD2Keys tracking ensures we don't + // accidentally send duplicates to D2. + for (let i = 0; i < 5; i++) { + source.utils.begin() + source.utils.write({ + type: `update`, + value: { id: 1, name: `Update ${i}`, category: `A` }, + }) + source.utils.commit() + } + + // Size should still be 3 (no duplicates from rapid updates) + expect(liveQuery.size).toBe(3) + expect(liveQuery.get(1)?.name).toBe(`Update 4`) + }) + }) + + describe(`SentToD2Keys Tracking Postcondition`, () => { + /** + * Contract: sentToD2Keys accurately tracks all keys currently in D2 + * After insert: key is in sentToD2Keys + * After delete: key is not in sentToD2Keys + */ + + it(`tracks keys after sequential inserts`, async () => { + const source = createItemsCollection([]) + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Insert items one by one + for (let i = 1; i <= 3; i++) { + source.utils.begin() + source.utils.write({ + type: `insert`, + value: { id: i, name: `Item ${i}`, category: `A` }, + }) + source.utils.commit() + + // Each insert should be immediately visible + expect(liveQuery.get(i)).toBeDefined() + } + + expect(liveQuery.size).toBe(3) + }) + + it(`removes key from tracking after delete`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Verify initial state + expect(liveQuery.get(1)).toBeDefined() + + // Delete + 
source.utils.begin() + source.utils.write({ + type: `delete`, + value: { id: 1, name: `Item 1`, category: `A` }, + }) + source.utils.commit() + + // Key should be removed from live query (and tracking) + expect(liveQuery.get(1)).toBeUndefined() + expect(liveQuery.size).toBe(2) + }) + + it(`allows re-insert after delete (tracking correctly cleared)`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Delete then re-insert multiple times + for (let i = 0; i < 3; i++) { + source.utils.begin() + source.utils.write({ + type: `delete`, + value: { id: 1, name: `Item 1`, category: `A` }, + }) + source.utils.commit() + + expect(liveQuery.get(1)).toBeUndefined() + + source.utils.begin() + source.utils.write({ + type: `insert`, + value: { id: 1, name: `Item 1 v${i + 2}`, category: `A` }, + }) + source.utils.commit() + + expect(liveQuery.get(1)).toBeDefined() + expect(liveQuery.get(1)?.name).toBe(`Item 1 v${i + 2}`) + } + + expect(liveQuery.size).toBe(3) + }) + }) + + describe(`Filtered Query Contracts`, () => { + /** + * When a live query has a WHERE clause, the D2 pipeline should only + * contain items matching the filter. 
+ */ + + it(`only includes matching items in D2 output`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }).where(({ item }) => eq(item.category, `A`)), + ) + + await liveQuery.preload() + + // Only category A items should be present + expect(liveQuery.size).toBe(2) + expect(liveQuery.get(1)).toBeDefined() + expect(liveQuery.get(2)).toBeDefined() + expect(liveQuery.get(3)).toBeUndefined() // category B + }) + + it(`removes item from D2 when update moves it out of filter`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }).where(({ item }) => eq(item.category, `A`)), + ) + + await liveQuery.preload() + + expect(liveQuery.size).toBe(2) + + // Update item 1 to category B (out of filter) + source.utils.begin() + source.utils.write({ + type: `update`, + value: { id: 1, name: `Item 1`, category: `B` }, + }) + source.utils.commit() + + // Item 1 should no longer be in the live query + expect(liveQuery.size).toBe(1) + expect(liveQuery.get(1)).toBeUndefined() + expect(liveQuery.get(2)).toBeDefined() + }) + + it(`adds item to D2 when update moves it into filter`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }).where(({ item }) => eq(item.category, `A`)), + ) + + await liveQuery.preload() + + expect(liveQuery.size).toBe(2) + expect(liveQuery.get(3)).toBeUndefined() // category B initially + + // Update item 3 to category A (into filter) + source.utils.begin() + source.utils.write({ + type: `update`, + value: { id: 3, name: `Item 3`, category: `A` }, + }) + source.utils.commit() + + // Item 3 should now be in the live query + expect(liveQuery.size).toBe(3) + expect(liveQuery.get(3)).toBeDefined() + expect(liveQuery.get(3)?.category).toBe(`A`) + }) + }) + + describe(`Change Sequence Consistency`, () => { + /** + * Contract: 
Live query state should be consistent with source state + * after any sequence of insert/update/delete operations. + */ + + it(`remains consistent after mixed operation sequence`, async () => { + const source = createItemsCollection([]) + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Mixed sequence of operations + const operations = [ + { type: `insert` as const, id: 1, name: `A` }, + { type: `insert` as const, id: 2, name: `B` }, + { type: `update` as const, id: 1, name: `A-updated` }, + { type: `insert` as const, id: 3, name: `C` }, + { type: `delete` as const, id: 2 }, + { type: `insert` as const, id: 4, name: `D` }, + { type: `update` as const, id: 3, name: `C-updated` }, + { type: `delete` as const, id: 1 }, + { type: `insert` as const, id: 1, name: `A-reinserted` }, + ] + + for (const op of operations) { + source.utils.begin() + if (op.type === `insert`) { + source.utils.write({ + type: `insert`, + value: { id: op.id, name: op.name, category: `X` }, + }) + } else if (op.type === `update`) { + source.utils.write({ + type: `update`, + value: { id: op.id, name: op.name, category: `X` }, + }) + } else { + source.utils.write({ + type: `delete`, + value: { id: op.id, name: ``, category: `X` }, + }) + } + source.utils.commit() + } + + // Final state should have items 1, 3, 4 + expect(liveQuery.size).toBe(3) + expect(liveQuery.get(1)?.name).toBe(`A-reinserted`) + expect(liveQuery.get(2)).toBeUndefined() + expect(liveQuery.get(3)?.name).toBe(`C-updated`) + expect(liveQuery.get(4)?.name).toBe(`D`) + }) + + it(`handles rapid insert-delete cycles`, async () => { + const source = createItemsCollection([]) + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Rapid insert-delete cycles for same key + for (let cycle = 0; cycle < 5; cycle++) { + source.utils.begin() + source.utils.write({ + type: `insert`, + value: { id: 1, name: 
`Cycle ${cycle}`, category: `X` }, + }) + source.utils.commit() + + expect(liveQuery.get(1)?.name).toBe(`Cycle ${cycle}`) + + source.utils.begin() + source.utils.write({ + type: `delete`, + value: { id: 1, name: `Cycle ${cycle}`, category: `X` }, + }) + source.utils.commit() + + expect(liveQuery.get(1)).toBeUndefined() + } + + expect(liveQuery.size).toBe(0) + }) + }) + + describe(`Batch Operation Contracts`, () => { + /** + * Contract: Batch operations (multiple changes in one begin/commit) + * should maintain all invariants. + */ + + it(`handles batch inserts without duplicate keys in D2`, async () => { + const source = createItemsCollection([]) + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Batch insert multiple items + source.utils.begin() + for (let i = 1; i <= 10; i++) { + source.utils.write({ + type: `insert`, + value: { id: i, name: `Batch Item ${i}`, category: `A` }, + }) + } + source.utils.commit() + + // All 10 items should be present exactly once + expect(liveQuery.size).toBe(10) + for (let i = 1; i <= 10; i++) { + expect(liveQuery.get(i)).toBeDefined() + expect(liveQuery.get(i)?.name).toBe(`Batch Item ${i}`) + } + }) + + it(`handles batch with mixed operations`, async () => { + const source = createItemsCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + expect(liveQuery.size).toBe(3) + + // Batch with mixed operations + source.utils.begin() + source.utils.write({ + type: `delete`, + value: { id: 1, name: `Item 1`, category: `A` }, + }) + source.utils.write({ + type: `update`, + value: { id: 2, name: `Updated Item 2`, category: `A` }, + }) + source.utils.write({ + type: `insert`, + value: { id: 4, name: `New Item 4`, category: `C` }, + }) + source.utils.commit() + + expect(liveQuery.size).toBe(3) // -1 delete, +1 insert = same size + expect(liveQuery.get(1)).toBeUndefined() + 
expect(liveQuery.get(2)?.name).toBe(`Updated Item 2`) + expect(liveQuery.get(3)).toBeDefined() + expect(liveQuery.get(4)?.name).toBe(`New Item 4`) + }) + }) +}) + +describe(`Contract Error Types`, () => { + /** + * Verify that contract violation errors are properly typed + * and can be caught/identified. + */ + + it(`InvariantViolationError has correct structure`, () => { + const error = new InvariantViolationError(`test invariant message`) + + expect(error).toBeInstanceOf(Error) + expect(error).toBeInstanceOf(InvariantViolationError) + expect(error.name).toBe(`InvariantViolationError`) + expect(error.violationType).toBe(`invariant`) + expect(error.message).toContain(`test invariant message`) + expect(error.message).toContain(`Invariant violation`) + }) + + it(`PostconditionViolationError has correct structure`, () => { + const error = new PostconditionViolationError(`test postcondition message`) + + expect(error).toBeInstanceOf(Error) + expect(error).toBeInstanceOf(PostconditionViolationError) + expect(error.name).toBe(`PostconditionViolationError`) + expect(error.violationType).toBe(`postcondition`) + expect(error.message).toContain(`test postcondition message`) + expect(error.message).toContain(`Postcondition violation`) + }) +}) diff --git a/packages/db/tests/query/live-query-properties.test.ts b/packages/db/tests/query/live-query-properties.test.ts new file mode 100644 index 000000000..a292dc577 --- /dev/null +++ b/packages/db/tests/query/live-query-properties.test.ts @@ -0,0 +1,503 @@ +/** + * Property-based tests for live query contracts. + * + * These tests use fast-check to generate random sequences of operations + * and verify that invariants hold across all possible inputs. Based on + * Cheng Huang's approach of using property-based tests to explore edge cases. + * + * Key properties tested: + * 1. D2 never contains duplicate keys (multiplicity invariant) + * 2. Live query state matches expected state after any change sequence + * 3. 
Delete followed by insert always succeeds (tracking cleared correctly)
+ * 4. Batch operations maintain consistency
+ */
+
+import { describe, expect, it } from 'vitest'
+import * as fc from 'fast-check'
+import { createCollection } from '../../src/collection/index.js'
+import { createLiveQueryCollection, eq } from '../../src/query/index.js'
+import { mockSyncCollectionOptions } from '../utils.js'
+
+// Types for property-based testing
+type Item = {
+  id: number
+  name: string
+  category: string
+}
+
+type Operation =
+  | { type: `insert`; id: number; name: string; category: string }
+  | { type: `update`; id: number; name: string; category: string }
+  | { type: `delete`; id: number }
+
+// Arbitraries for generating random test data
+const itemIdArb = fc.integer({ min: 1, max: 20 })
+const nameArb = fc.string({ minLength: 1, maxLength: 10 })
+const categoryArb = fc.constantFrom(`A`, `B`, `C`)
+
+const insertOpArb: fc.Arbitrary<Operation> = fc.record({
+  type: fc.constant(`insert` as const),
+  id: itemIdArb,
+  name: nameArb,
+  category: categoryArb,
+})
+
+const updateOpArb: fc.Arbitrary<Operation> = fc.record({
+  type: fc.constant(`update` as const),
+  id: itemIdArb,
+  name: nameArb,
+  category: categoryArb,
+})
+
+const deleteOpArb: fc.Arbitrary<Operation> = fc.record({
+  type: fc.constant(`delete` as const),
+  id: itemIdArb,
+})
+
+const operationArb: fc.Arbitrary<Operation> = fc.oneof(
+  insertOpArb,
+  updateOpArb,
+  deleteOpArb,
+)
+
+// Helper to create a fresh collection for each test
+let collectionCounter = 0
+function createTestCollection() {
+  return createCollection(
+    mockSyncCollectionOptions<Item>({
+      id: `property-test-${collectionCounter++}`,
+      getKey: (item) => item.id,
+      initialData: [],
+    }),
+  )
+}
+
+/**
+ * Simulates expected state after a sequence of operations.
+ * Returns what keys should exist and their values.
+ */
+function simulateOperations(
+  operations: Array<Operation>,
+): Map<number, { name: string; category: string }> {
+  const state = new Map<number, { name: string; category: string }>()
+
+  for (const op of operations) {
+    if (op.type === `insert`) {
+      // Insert only succeeds if key doesn't exist
+      if (!state.has(op.id)) {
+        state.set(op.id, { name: op.name, category: op.category })
+      }
+    } else if (op.type === `update`) {
+      // Update only succeeds if key exists
+      if (state.has(op.id)) {
+        state.set(op.id, { name: op.name, category: op.category })
+      }
+    } else {
+      // Delete only succeeds if key exists
+      state.delete(op.id)
+    }
+  }
+
+  return state
+}
+
+/**
+ * Applies operations to a real collection, tracking state.
+ */
+function applyOperationsToCollection(
+  source: ReturnType<typeof createTestCollection>,
+  operations: Array<Operation>,
+): Set<number> {
+  const existingIds = new Set<number>()
+
+  for (const op of operations) {
+    source.utils.begin()
+
+    if (op.type === `insert`) {
+      if (!existingIds.has(op.id)) {
+        source.utils.write({
+          type: `insert`,
+          value: { id: op.id, name: op.name, category: op.category },
+        })
+        existingIds.add(op.id)
+      }
+    } else if (op.type === `update`) {
+      if (existingIds.has(op.id)) {
+        source.utils.write({
+          type: `update`,
+          value: { id: op.id, name: op.name, category: op.category },
+        })
+      }
+    } else {
+      // op.type === `delete`
+      if (existingIds.has(op.id)) {
+        source.utils.write({
+          type: `delete`,
+          value: { id: op.id, name: ``, category: `` },
+        })
+        existingIds.delete(op.id)
+      }
+    }
+
+    source.utils.commit()
+  }
+
+  return existingIds
+}
+
+describe(`Live Query Property-Based Tests`, () => {
+  describe(`D2 Multiplicity Invariant`, () => {
+    it(`live query never contains duplicate keys after any operation sequence`, async () => {
+      await fc.assert(
+        fc.asyncProperty(
+          fc.array(operationArb, { minLength: 1, maxLength: 50 }),
+          async (operations) => {
+            const source = createTestCollection()
+            const liveQuery = createLiveQueryCollection((q) =>
+              q.from({ item: source }),
+            )
+
+            await liveQuery.preload()
+
+            applyOperationsToCollection(source, operations)
+
+            // Verify
no duplicate keys (each key appears exactly once) + const items = liveQuery.toArray + const keys = items.map((item) => item.id) + const uniqueKeys = new Set(keys) + + expect(keys.length).toBe(uniqueKeys.size) + }, + ), + { numRuns: 100 }, + ) + }) + + it(`live query size matches expected state`, async () => { + await fc.assert( + fc.asyncProperty( + fc.array(operationArb, { minLength: 0, maxLength: 30 }), + async (operations) => { + const source = createTestCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + const expectedState = simulateOperations(operations) + applyOperationsToCollection(source, operations) + + expect(liveQuery.size).toBe(expectedState.size) + }, + ), + { numRuns: 100 }, + ) + }) + }) + + describe(`State Consistency`, () => { + it(`live query contains exactly the expected items after any sequence`, async () => { + await fc.assert( + fc.asyncProperty( + fc.array(operationArb, { minLength: 0, maxLength: 30 }), + async (operations) => { + const source = createTestCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + const expectedState = simulateOperations(operations) + applyOperationsToCollection(source, operations) + + // Check size matches + expect(liveQuery.size).toBe(expectedState.size) + + // Check each expected item is present with correct values + for (const [id, expected] of expectedState) { + const actual = liveQuery.get(id) + expect(actual).toBeDefined() + expect(actual?.name).toBe(expected.name) + expect(actual?.category).toBe(expected.category) + } + + // Check no unexpected items + for (const item of liveQuery.toArray) { + expect(expectedState.has(item.id)).toBe(true) + } + }, + ), + { numRuns: 100 }, + ) + }) + }) + + describe(`Delete-Insert Cycles`, () => { + it(`can always re-insert after delete for any key`, async () => { + await fc.assert( + fc.asyncProperty( + itemIdArb, + 
fc.integer({ min: 1, max: 10 }), + async (id, cycles) => { + const source = createTestCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + for (let i = 0; i < cycles; i++) { + // Insert + source.utils.begin() + source.utils.write({ + type: `insert`, + value: { id, name: `Cycle ${i}`, category: `A` }, + }) + source.utils.commit() + + expect(liveQuery.get(id)).toBeDefined() + expect(liveQuery.get(id)?.name).toBe(`Cycle ${i}`) + + // Delete + source.utils.begin() + source.utils.write({ + type: `delete`, + value: { id, name: ``, category: `` }, + }) + source.utils.commit() + + expect(liveQuery.get(id)).toBeUndefined() + } + + expect(liveQuery.size).toBe(0) + }, + ), + { numRuns: 50 }, + ) + }) + }) + + describe(`Filtered Query Properties`, () => { + it(`filtered query only contains items matching the filter`, async () => { + await fc.assert( + fc.asyncProperty( + fc.array(operationArb, { minLength: 1, maxLength: 20 }), + categoryArb, + async (operations, filterCategory) => { + const source = createTestCollection() + const liveQuery = createLiveQueryCollection((q) => + q + .from({ item: source }) + .where(({ item }) => eq(item.category, filterCategory)), + ) + + await liveQuery.preload() + + const expectedState = simulateOperations(operations) + applyOperationsToCollection(source, operations) + + // All items in live query must match filter + for (const item of liveQuery.toArray) { + expect(item.category).toBe(filterCategory) + } + + // Count matching items from expected state + let expectedMatchCount = 0 + for (const [, value] of expectedState) { + if (value.category === filterCategory) { + expectedMatchCount++ + } + } + + expect(liveQuery.size).toBe(expectedMatchCount) + }, + ), + { numRuns: 100 }, + ) + }) + + it(`item moves in/out of filter correctly on update`, async () => { + await fc.assert( + fc.asyncProperty( + itemIdArb, + fc.array(categoryArb, { minLength: 2, maxLength: 10 }), + 
async (id, categorySequence) => { + const source = createTestCollection() + const filterCategory = `A` + const liveQuery = createLiveQueryCollection((q) => + q + .from({ item: source }) + .where(({ item }) => eq(item.category, filterCategory)), + ) + + await liveQuery.preload() + + // Initial insert + source.utils.begin() + source.utils.write({ + type: `insert`, + value: { id, name: `Test`, category: categorySequence[0]! }, + }) + source.utils.commit() + + // Update through category sequence + for (let i = 1; i < categorySequence.length; i++) { + const newCategory = categorySequence[i]! + + source.utils.begin() + source.utils.write({ + type: `update`, + value: { id, name: `Test`, category: newCategory }, + }) + source.utils.commit() + + if (newCategory === filterCategory) { + expect(liveQuery.get(id)).toBeDefined() + } else { + expect(liveQuery.get(id)).toBeUndefined() + } + } + }, + ), + { numRuns: 50 }, + ) + }) + }) + + describe(`Batch Operation Properties`, () => { + it(`batch insert maintains no-duplicate invariant`, async () => { + await fc.assert( + fc.asyncProperty( + fc.array(insertOpArb, { minLength: 1, maxLength: 20 }), + async (inserts) => { + const source = createTestCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + // Batch insert, tracking which IDs we've added + const insertedIds = new Set() + + source.utils.begin() + for (const op of inserts) { + if (!insertedIds.has(op.id)) { + source.utils.write({ + type: `insert`, + value: { id: op.id, name: op.name, category: op.category }, + }) + insertedIds.add(op.id) + } + } + source.utils.commit() + + // Verify no duplicates + const keys = liveQuery.toArray.map((item) => item.id) + const uniqueKeys = new Set(keys) + + expect(keys.length).toBe(uniqueKeys.size) + expect(liveQuery.size).toBe(insertedIds.size) + }, + ), + { numRuns: 100 }, + ) + }) + + it(`sequential and batch produce same result`, async () => { + await fc.assert( 
+ fc.asyncProperty( + fc.array(operationArb, { minLength: 1, maxLength: 20 }), + async (operations) => { + // Run sequential + const source1 = createTestCollection() + const liveQuery1 = createLiveQueryCollection((q) => + q.from({ item: source1 }), + ) + await liveQuery1.preload() + applyOperationsToCollection(source1, operations) + + // Run batched (all ops in one transaction) + const source2 = createTestCollection() + const liveQuery2 = createLiveQueryCollection((q) => + q.from({ item: source2 }), + ) + await liveQuery2.preload() + + const existingIds = new Set() + source2.utils.begin() + for (const op of operations) { + if (op.type === `insert` && !existingIds.has(op.id)) { + source2.utils.write({ + type: `insert`, + value: { id: op.id, name: op.name, category: op.category }, + }) + existingIds.add(op.id) + } else if (op.type === `update` && existingIds.has(op.id)) { + source2.utils.write({ + type: `update`, + value: { id: op.id, name: op.name, category: op.category }, + }) + } else if (op.type === `delete` && existingIds.has(op.id)) { + source2.utils.write({ + type: `delete`, + value: { id: op.id, name: ``, category: `` }, + }) + existingIds.delete(op.id) + } + } + source2.utils.commit() + + // Both should have same size + expect(liveQuery1.size).toBe(liveQuery2.size) + + // Both should have same items + for (const item of liveQuery1.toArray) { + const item2 = liveQuery2.get(item.id) + expect(item2).toBeDefined() + expect(item2?.name).toBe(item.name) + expect(item2?.category).toBe(item.category) + } + }, + ), + { numRuns: 100 }, + ) + }) + }) + + describe(`Stress Tests`, () => { + it(`handles large operation sequences without invariant violations`, async () => { + await fc.assert( + fc.asyncProperty( + fc.array(operationArb, { minLength: 100, maxLength: 200 }), + async (operations) => { + const source = createTestCollection() + const liveQuery = createLiveQueryCollection((q) => + q.from({ item: source }), + ) + + await liveQuery.preload() + + const 
expectedState = simulateOperations(operations) + applyOperationsToCollection(source, operations) + + // Core invariants hold + expect(liveQuery.size).toBe(expectedState.size) + + const keys = liveQuery.toArray.map((item) => item.id) + const uniqueKeys = new Set(keys) + expect(keys.length).toBe(uniqueKeys.size) + + // All expected items present + for (const [id] of expectedState) { + expect(liveQuery.get(id)).toBeDefined() + } + }, + ), + { numRuns: 20 }, + ) + }) + }) +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8f8a15bfc..256cd92cd 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -576,7 +576,7 @@ importers: version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.13) + version: 0.8.3(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) express: specifier: ^4.22.1 version: 4.22.1 @@ -706,6 +706,9 @@ importers: arktype: specifier: ^2.1.29 version: 2.1.29 + fast-check: + specifier: ^4.5.3 + version: 4.5.3 mitt: specifier: ^3.0.1 version: 3.0.1 @@ -747,8 +750,6 @@ importers: specifier: ^3.2.4 version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.4.0)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) - packages/db-collections: {} - packages/db-ivm: dependencies: fractional-indexing: @@ -5639,6 +5640,10 @@ packages: extendable-error@0.1.7: resolution: {integrity: sha512-UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg==} + fast-check@4.5.3: + resolution: {integrity: sha512-IE9csY7lnhxBnA8g/WI5eg/hygA6MGWJMSNfFRrBlXUciADEhS1EDB0SIsMSvzubzIlOBbVITSsypCsW717poA==} + engines: {node: '>=12.17.0'} + fast-deep-equal@3.1.3: resolution: {integrity: 
sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -7406,6 +7411,9 @@ packages: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} + pure-rand@7.0.1: + resolution: {integrity: sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==} + pvtsutils@1.3.6: resolution: {integrity: sha512-PLgQXQ6H2FWCaeRak8vvk1GW462lMxB5s3Jm673N82zI4vqtVUPuZdffdZbPDFRoU8kAhItWFtPCWiPpp4/EDg==} @@ -13294,6 +13302,11 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 + drizzle-zod@0.8.3(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): + dependencies: + drizzle-orm: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7) + zod: 3.25.76 + drizzle-zod@0.8.3(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.13): dependencies: drizzle-orm: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.16.0)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7) @@ -13942,6 +13955,10 @@ snapshots: extendable-error@0.1.7: {} + fast-check@4.5.3: + dependencies: + pure-rand: 7.0.1 + fast-deep-equal@3.1.3: {} fast-diff@1.3.0: {} @@ -15801,6 +15818,8 @@ snapshots: punycode@2.3.1: {} + pure-rand@7.0.1: {} + pvtsutils@1.3.6: dependencies: tslib: 2.8.1