From 27a372874597ad09ffda08044cfd6094194f79f5 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 1 Oct 2025 10:24:45 +0200 Subject: [PATCH 01/25] wip: PowerSync collections --- packages/powersync-db-collection/CHANGELOG.md | 1 + packages/powersync-db-collection/package.json | 69 ++++ .../src/PendingOperationStore.ts | 47 +++ .../src/PowerSyncTransactor.ts | 139 +++++++ .../powersync-db-collection/src/helpers.ts | 30 ++ packages/powersync-db-collection/src/index.ts | 1 + .../powersync-db-collection/src/powersync.ts | 340 ++++++++++++++++++ .../tests/powersync.test.ts | 257 +++++++++++++ .../tsconfig.docs.json | 9 + .../powersync-db-collection/tsconfig.json | 20 ++ .../powersync-db-collection/vite.config.ts | 21 ++ pnpm-lock.yaml | 139 ++++--- 12 files changed, 1031 insertions(+), 42 deletions(-) create mode 100644 packages/powersync-db-collection/CHANGELOG.md create mode 100644 packages/powersync-db-collection/package.json create mode 100644 packages/powersync-db-collection/src/PendingOperationStore.ts create mode 100644 packages/powersync-db-collection/src/PowerSyncTransactor.ts create mode 100644 packages/powersync-db-collection/src/helpers.ts create mode 100644 packages/powersync-db-collection/src/index.ts create mode 100644 packages/powersync-db-collection/src/powersync.ts create mode 100644 packages/powersync-db-collection/tests/powersync.test.ts create mode 100644 packages/powersync-db-collection/tsconfig.docs.json create mode 100644 packages/powersync-db-collection/tsconfig.json create mode 100644 packages/powersync-db-collection/vite.config.ts diff --git a/packages/powersync-db-collection/CHANGELOG.md b/packages/powersync-db-collection/CHANGELOG.md new file mode 100644 index 000000000..1c804e7e2 --- /dev/null +++ b/packages/powersync-db-collection/CHANGELOG.md @@ -0,0 +1 @@ +# @tanstack/powersync-db-collection diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json new file mode 100644 index 
000000000..9fa1f15be --- /dev/null +++ b/packages/powersync-db-collection/package.json @@ -0,0 +1,69 @@ +{ + "name": "@tanstack/powersync-db-collection", + "description": "PowerSync collection for TanStack DB", + "version": "0.0.0", + "dependencies": { + "@powersync/common": "^1.39.0", + "@standard-schema/spec": "^1.0.0", + "@tanstack/db": "workspace:*", + "@tanstack/store": "^0.7.7", + "debug": "^4.4.3", + "p-defer": "^4.0.1" + }, + "devDependencies": { + "@powersync/better-sqlite3": "^0.2.0", + "@powersync/node": "^0.11.0", + "@types/debug": "^4.1.12", + "@vitest/coverage-istanbul": "^3.2.4" + }, + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist", + "src" + ], + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "packageManager": "pnpm@10.17.0", + "author": "JOURNEYAPPS", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "https://github.com/TanStack/db.git", + "directory": "packages/powersync-db-collection" + }, + "homepage": "https://tanstack.com/db", + "keywords": [ + "powersync", + "realtime", + "local-first", + "sync-engine", + "sync", + "replication", + "opfs", + "indexeddb", + "localstorage", + "optimistic", + "typescript" + ], + "scripts": { + "build": "vite build", + "dev": "vite build --watch", + "lint": "eslint . 
--fix", + "test": "npx vitest --run" + }, + "sideEffects": false, + "type": "module", + "types": "dist/esm/index.d.ts" +} diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts new file mode 100644 index 000000000..9353cd03e --- /dev/null +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -0,0 +1,47 @@ +import pDefer from "p-defer" +import type { DiffTriggerOperation } from "@powersync/common" +import type { DeferredPromise } from "p-defer" + +export type PendingOperation = { + operation: DiffTriggerOperation + id: string + timestamp: string +} + +/** + * Optimistic mutations have their optimistic state discarded once transactions have + * been applied. + * We need to ensure that an applied transaction has been observed by the sync diff trigger + * before resoling the transaction application call. + * This store allows registering a wait for a pending operation to have been observed. + */ +export class PendingOperationStore { + private pendingOperations = new Map>() + + /** + * @returns A promise which will resolve once the specified operation has been seen. + */ + waitFor(operation: PendingOperation): Promise { + const managedPromise = pDefer() + this.pendingOperations.set(operation, managedPromise) + return managedPromise.promise + } + + /** + * Marks a set of operations as seen. This will resolve any pending promises. 
+ */ + resolvePendingFor(operations: Array) { + for (const operation of operations) { + for (const [pendingOp, deferred] of this.pendingOperations.entries()) { + if ( + pendingOp.operation == operation.operation && + pendingOp.id == operation.id && + pendingOp.timestamp == operation.timestamp + ) { + deferred.resolve() + this.pendingOperations.delete(pendingOp) + } + } + } + } +} diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts new file mode 100644 index 000000000..5d145e270 --- /dev/null +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -0,0 +1,139 @@ +import { sanitizeSQL } from "@powersync/common" +import DebugModule from "debug" +import { asPowerSyncRecord } from "./helpers" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" +import type { Transaction } from "@tanstack/db" +import type { + PendingOperation, + PendingOperationStore, +} from "./PendingOperationStore" +import type { PowerSyncRecord } from "./helpers" + +const debug = DebugModule.debug(`ts/db:powersync`) + +export type TransactorOptions = { + database: AbstractPowerSyncDatabase + tableName: string + pendingOperationStore: PendingOperationStore + trackedTableName: string +} + +/** + * Handles persisting Tanstack DB transactions to the PowerSync SQLite DB. + */ +export class PowerSyncTransactor> { + database: AbstractPowerSyncDatabase + pendingOperationStore: PendingOperationStore + tableName: string + trackedTableName: string + + constructor(options: TransactorOptions) { + this.database = options.database + this.pendingOperationStore = options.pendingOperationStore + this.tableName = sanitizeSQL`${options.tableName}` + this.trackedTableName = sanitizeSQL`${options.trackedTableName}` + } + + /** + * Persists a {@link Transaction} to PowerSync's SQLite DB. 
+ */ + async applyTransaction(transaction: Transaction) { + const { mutations } = transaction + + // Persist to PowerSync + const { whenComplete } = await this.database.writeTransaction( + async (tx) => { + for (const mutation of mutations) { + switch (mutation.type) { + case `insert`: + await this.handleInsert(asPowerSyncRecord(mutation.modified), tx) + break + case `update`: + await this.handleUpdate(asPowerSyncRecord(mutation.modified), tx) + break + case `delete`: + await this.handleDelete(asPowerSyncRecord(mutation.original), tx) + break + } + } + + /** + * Fetch the last diff operation in the queue. + * We need to wait for this operation to be seen by the + * sync handler before returning from the application call. + */ + const lastDiffOp = await tx.getOptional(` + SELECT + id, operation, timestamp + FROM + ${this.trackedTableName} + ORDER BY + timestamp DESC + LIMIT 1 + `) + + /** + * Return a promise from the writeTransaction, without awaiting it. + * This promise will resolve once the entire transaction has been + * observed via the diff triggers. + * We return without awaiting in order to free the writeLock. + */ + return { + whenComplete: lastDiffOp + ? 
this.pendingOperationStore.waitFor(lastDiffOp) + : Promise.resolve(), + } + } + ) + + // Wait for the change to be observed via the diff trigger + await whenComplete + } + + protected async handleInsert( + mutation: PowerSyncRecord, + context: LockContext + ) { + debug(`insert`, mutation) + const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) + await context.execute( + ` + INSERT into ${this.tableName} + (${keys.join(`, `)}) + VALUES + (${keys.map((_) => `?`).join(`, `)}) + `, + Object.values(mutation) + ) + } + + protected async handleUpdate( + mutation: PowerSyncRecord, + context: LockContext + ) { + debug(`update`, mutation) + + const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) + await context.execute( + ` + UPDATE ${this.tableName} + SET ${keys.map((key) => `${key} = ?`).join(`, `)} + WHERE id = ? + `, + [...Object.values(mutation), mutation.id] + ) + } + + protected async handleDelete( + mutation: PowerSyncRecord, + context: LockContext + ) { + debug(`delete`, mutation) + await context.execute( + ` + DELETE FROM ${this.tableName} WHERE id = ? + `, + [mutation.id] + ) + } +} diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts new file mode 100644 index 000000000..29c42b7a9 --- /dev/null +++ b/packages/powersync-db-collection/src/helpers.ts @@ -0,0 +1,30 @@ +import { DiffTriggerOperation } from "@powersync/common" + +/** + * All PowerSync table records have a uuid `id` column. 
+ */ +export type PowerSyncRecord = { + id: string + [key: string]: unknown +} + +export function asPowerSyncRecord(record: any): PowerSyncRecord { + if (typeof record.id !== `string`) { + throw new Error(`Record must have a string id field`) + } + return record as PowerSyncRecord +} + +/** + * Maps Tanstack DB operations to {@link DiffTriggerOperation} + */ +export function mapOperation(operation: DiffTriggerOperation) { + switch (operation) { + case DiffTriggerOperation.INSERT: + return `insert` + case DiffTriggerOperation.UPDATE: + return `update` + case DiffTriggerOperation.DELETE: + return `delete` + } +} diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts new file mode 100644 index 000000000..30e35e857 --- /dev/null +++ b/packages/powersync-db-collection/src/index.ts @@ -0,0 +1 @@ +export * from "./powersync" diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts new file mode 100644 index 000000000..b204c04bf --- /dev/null +++ b/packages/powersync-db-collection/src/powersync.ts @@ -0,0 +1,340 @@ +import { DiffTriggerOperation } from "@powersync/common" +import { PendingOperationStore } from "./PendingOperationStore" +import { PowerSyncTransactor } from "./PowerSyncTransactor" +import { mapOperation } from "./helpers" +import type { PendingOperation } from "./PendingOperationStore" +import type { + BaseCollectionConfig, + CollectionConfig, + InferSchemaOutput, + SyncConfig, + Transaction, +} from "@tanstack/db" +import type { + AbstractPowerSyncDatabase, + TriggerDiffRecord, +} from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +/** + * Configuration interface for PowerSync collection options + * @template T - The type of items in the collection + * @template TSchema - The schema type for validation + */ +/** + * Configuration options for creating a PowerSync collection. 
+ * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export type PowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = Omit< + BaseCollectionConfig, + `onInsert` | `onUpdate` | `onDelete` | `getKey` +> & { + /** The name of the table in PowerSync database */ + tableName: string + /** The PowerSync database instance */ + database: AbstractPowerSyncDatabase +} + +export type PowerSyncCollectionUtils = { + /** + * Applies mutations to the PowerSync database. This method is called automatically by the collection's + * insert, update, and delete operations. You typically don't need to call this directly unless you + * have special transaction requirements. 
+ * + * @example + * ```typescript + * // Create a collection + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * + * const addTx = createTransaction({ + * autoCommit: false, + * mutationFn: async ({ transaction }) => { + * await collection.utils.mutateTransaction(transaction) + * }, + * }) + * + * addTx.mutate(() => { + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } + * }) + * + * await addTx.commit() + * await addTx.isPersisted.promise + * ``` + * + * @param transaction - The transaction containing mutations to apply + * @returns A promise that resolves when the mutations have been persisted to PowerSync + */ + mutateTransaction: (transaction: Transaction) => Promise +} + +/** + * Creates PowerSync collection options for use with a standard Collection + * + * @template TExplicit - The explicit type of items in the collection (highest priority) + * @template TSchema - The schema type for validation and type inference (second priority) + * @param config - Configuration options for the PowerSync collection + * @returns Collection options with utilities + */ + +// Overload for when schema is provided +/** + * Creates a PowerSync collection configuration with schema validation. + * + * @example + * ```typescript + * // With schema validation + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * schema: APP_SCHEMA, + * }) + * ) + * ``` + */ +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig, T> +): CollectionConfig, string, T> & { + schema: T + utils: PowerSyncCollectionUtils +} + +/** + * Creates a PowerSync collection configuration without schema validation. 
+ * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig & { + schema?: never + } +): CollectionConfig & { + schema?: never + utils: PowerSyncCollectionUtils +} + +/** + * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. + */ +export function powerSyncCollectionOptions< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +>( + config: PowerSyncCollectionConfig +): CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema +} { + type Row = Record + type Key = string // we always use uuids for keys + + const { database, tableName, ...restConfig } = config + + /** + * The onInsert, onUpdate, onDelete handlers should only return + * after we have written the changes to Tanstack DB. + * We currently only write to Tanstack DB from a diff trigger. + * We wait for the diff trigger to observe the change, + * and only then return from the on[X] handlers. + * This ensures that when the transaction is reported as + * complete to the caller, the in-memory state is already + * consistent with the database. + */ + const pendingOperationStore = new PendingOperationStore() + const trackedTableName = `__${tableName}_tracking` + + const transactor = new PowerSyncTransactor({ + database, + pendingOperationStore, + tableName, + trackedTableName, + }) + + /** + * "sync" + * Notice that this describes the Sync between the local SQLite table + * and the in-memory tanstack-db collection. 
+ * It is not about sync between a client and a server! + */ + type SyncParams = Parameters[`sync`]>[0] + const sync: SyncConfig = { + sync: async (params: SyncParams) => { + const { begin, write, commit, markReady } = params + + // Manually create a tracking operation for optimization purposes + const abortController = new AbortController() + + database.onChangeWithCallback( + { + onChange: async () => { + await database.writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] + + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? { id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + }) + } + + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) + + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + }, + }, + { + signal: abortController.signal, + triggerImmediate: false, + tables: [trackedTableName], + } + ) + + const disposeTracking = await database.triggers.createDiffTrigger({ + source: tableName, + destination: trackedTableName, + when: { + [DiffTriggerOperation.INSERT]: `TRUE`, + [DiffTriggerOperation.UPDATE]: `TRUE`, + [DiffTriggerOperation.DELETE]: `TRUE`, + }, + hooks: { + beforeCreate: async (context) => { + begin() + for (const row of await context.getAll>( + `SELECT * FROM ${tableName}` + )) { + write({ + type: `insert`, + value: row, + }) + } + commit() + markReady() + }, + }, + }) + + return () => { + abortController.abort() + disposeTracking() + } + }, + // Expose the getSyncMetadata function + getSyncMetadata: undefined, + } + 
+ const getKey = (record: Record) => record.id as string + + return { + ...restConfig, + getKey, + sync, + onInsert: async (params) => { + // The transaction here should only ever contain a single insert mutation + return await transactor.applyTransaction(params.transaction) + }, + onUpdate: async (params) => { + // The transaction here should only ever contain a single update mutation + return await transactor.applyTransaction(params.transaction) + }, + onDelete: async (params) => { + // The transaction here should only ever contain a single delete mutation + return await transactor.applyTransaction(params.transaction) + }, + utils: { + mutateTransaction: async (transaction: Transaction) => { + return await transactor.applyTransaction(transaction) + }, + }, + } as CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema + } +} diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts new file mode 100644 index 000000000..4e9676ef0 --- /dev/null +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -0,0 +1,257 @@ +import { randomUUID } from "node:crypto" +import { + CrudEntry, + PowerSyncDatabase, + Schema, + Table, + column, +} from "@powersync/node" +import { createCollection, createTransaction } from "@tanstack/db" +import { describe, expect, it, onTestFinished, vi } from "vitest" +import { powerSyncCollectionOptions } from "../src" +import type { AbstractPowerSyncDatabase } from "@powersync/node" + +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + }), +}) + +type Document = (typeof APP_SCHEMA)[`types`][`documents`] + +describe(`PowerSync Integration`, () => { + async function createDatabase() { + const db = new PowerSyncDatabase({ + database: { + dbFilename: `test.sqlite`, + }, + schema: APP_SCHEMA, + }) + onTestFinished(async () => { + await db.disconnectAndClear() + await db.close() + }) + await 
db.disconnectAndClear() + return db + } + + async function createTestData(db: AbstractPowerSyncDatabase) { + await db.execute(` + INSERT into documents (id, name) + VALUES + (uuid(), 'one'), + (uuid(), 'two'), + (uuid(), 'three') + `) + } + + describe(`sync`, () => { + it(`should initialize and fetch initial data`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + ]) + }) + }) + + it(`should update when data syncs`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + // Make an update, simulates a sync from another client + await db.execute(` + INSERT into documents (id, name) + VALUES + (uuid(), 'four') + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(4) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + + await db.execute(` + DELETE from documents + WHERE name = 'two' + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + + await db.execute(` + UPDATE documents + SET name = 'updated' + WHERE name = 'one' + `) + + // The collection should update + await vi.waitFor( + () => { + 
expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `updated`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + }) + + it(`should propagate collection mutations to PowerSync`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + const id = randomUUID() + const tx = collection.insert({ + id, + name: `new`, + }) + + // The insert should optimistically update the collection + const newDoc = collection.get(id) + expect(newDoc).toBeDefined() + expect(newDoc!.name).toBe(`new`) + + await tx.isPersisted.promise + // The item should now be present in PowerSync + // We should also have patched it back in to Tanstack DB (removing the optimistic state) + + // Now do an update + await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted + .promise + + const updatedDoc = collection.get(id) + expect(updatedDoc).toBeDefined() + expect(updatedDoc!.name).toBe(`updatedNew`) + + await collection.delete(id).isPersisted.promise + + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + expect(crudEntries.length).toBe(6) + // We can only group transactions for similar operations + expect(crudEntries.map((e) => e.op)).toEqual([ + `PUT`, + `PUT`, + `PUT`, + `PUT`, + `PATCH`, + `DELETE`, + ]) + }) + + it(`should handle transactions`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + expect(collection.size).toBe(3) 
+ + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await collection.utils.mutateTransaction(transaction) + }, + }) + + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + collection.insert({ id: randomUUID(), name: `tx-${i}` }) + } + }) + + await addTx.commit() + await addTx.isPersisted.promise + + expect(collection.size).toBe(8) + + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. + */ + expect( + crudEntries + .reverse() + .slice(0, 5) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) +}) diff --git a/packages/powersync-db-collection/tsconfig.docs.json b/packages/powersync-db-collection/tsconfig.docs.json new file mode 100644 index 000000000..5a73feb02 --- /dev/null +++ b/packages/powersync-db-collection/tsconfig.docs.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@tanstack/db": ["../db/src"] + } + }, + "include": ["src"] +} diff --git a/packages/powersync-db-collection/tsconfig.json b/packages/powersync-db-collection/tsconfig.json new file mode 100644 index 000000000..7e586bab3 --- /dev/null +++ b/packages/powersync-db-collection/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "moduleResolution": "Bundler", + "declaration": true, + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "jsx": "react", + "paths": { + "@tanstack/store": ["../store/src"] + } + }, + "include": ["src", "tests", 
"vite.config.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/powersync-db-collection/vite.config.ts b/packages/powersync-db-collection/vite.config.ts new file mode 100644 index 000000000..af20d0da3 --- /dev/null +++ b/packages/powersync-db-collection/vite.config.ts @@ -0,0 +1,21 @@ +import { tanstackViteConfig } from "@tanstack/config/vite" +import { defineConfig, mergeConfig } from "vitest/config" +import packageJson from "./package.json" + +const config = defineConfig({ + test: { + name: packageJson.name, + dir: `./tests`, + environment: `node`, + coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, + typecheck: { enabled: true }, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: `./src/index.ts`, + srcDir: `./src`, + }) +) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4ce29fc7f..1c453d7a0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -475,7 +475,7 @@ importers: version: 0.44.5(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.5) + version: 0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) express: specifier: ^4.21.2 version: 4.21.2 @@ -647,6 +647,40 @@ importers: specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + packages/powersync-db-collection: + dependencies: + '@powersync/common': + specifier: ^1.39.0 + version: 1.39.0 + '@standard-schema/spec': + specifier: ^1.0.0 + version: 1.0.0 + '@tanstack/db': + specifier: workspace:* + version: link:../db + '@tanstack/store': + specifier: ^0.7.7 + version: 0.7.7 + debug: + specifier: ^4.4.3 + version: 4.4.3 + p-defer: + specifier: ^4.0.1 + version: 4.0.1 + 
devDependencies: + '@powersync/better-sqlite3': + specifier: ^0.2.0 + version: 0.2.0 + '@powersync/node': + specifier: ^0.11.0 + version: 0.11.0(@powersync/common@1.39.0) + '@types/debug': + specifier: ^4.1.12 + version: 4.1.12 + '@vitest/coverage-istanbul': + specifier: ^3.2.4 + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + packages/query-db-collection: dependencies: '@standard-schema/spec': @@ -754,7 +788,7 @@ importers: version: 1.9.9 vite-plugin-solid: specifier: ^2.11.8 - version: 2.11.8(@testing-library/jest-dom@6.8.0)(solid-js@1.9.9)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + version: 2.11.8(@testing-library/jest-dom@6.8.0)(solid-js@1.9.9)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) vitest: specifier: ^3.2.4 version: 3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) @@ -770,7 +804,7 @@ importers: version: 2.5.3(svelte@5.39.4)(typescript@5.9.2) '@sveltejs/vite-plugin-svelte': specifier: ^6.2.0 - version: 6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + version: 6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) '@vitest/coverage-istanbul': specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) @@ -823,7 +857,7 @@ importers: version: 1.0.10 '@vitejs/plugin-vue': specifier: ^5.2.4 - version: 
5.2.4(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2)) + version: 5.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2)) '@vitest/coverage-istanbul': specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) @@ -2695,6 +2729,17 @@ packages: '@poppinss/exception@1.2.2': resolution: {integrity: sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg==} + '@powersync/better-sqlite3@0.2.0': + resolution: {integrity: sha512-8otwueqHJqwilUz/vLENlpMp2c4k/TV6hGX016XrZxSkizDAil99yRm7lAVwpbYYGuSgyzidyDh6vy6PY+m4kw==} + + '@powersync/common@1.39.0': + resolution: {integrity: sha512-qGPl/LPRoopNWjduGXfN+P3PsdTMfFR9YI2TbsLA++VRMK+10To9ey3Z6yprKoVbdLmisPde9mAaTvb1ugkeyg==} + + '@powersync/node@0.11.0': + resolution: {integrity: sha512-33J3/TnZ+s9mu0pHFfJCZhSQp7C+Ai4/1sBxC7aNdiRCyvg8DBYY8P7gMYXlyZfyMQHc0hfs3GDJzxYOBMNAyQ==} + peerDependencies: + '@powersync/common': ^1.39.0 + '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -4192,6 +4237,9 @@ packages: resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} engines: {node: '>= 0.4'} + async-lock@1.4.1: + resolution: {integrity: sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==} + async-sema@3.1.1: resolution: {integrity: sha512-tLRNUXati5MFePdAk8dw7Qt7DpxPB60ofAgn8WRhW6a2rcimZnYBP9oxHiv0OHy+Wz7kPMG+t4LGdt31+4EmGg==} @@ -4492,6 +4540,9 @@ packages: colorette@2.0.20: resolution: {integrity: 
sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + comlink@4.4.2: + resolution: {integrity: sha512-OxGdvBmJuNKSCMO4NTl1L47VRp6xn2wG4F/2hYzB6tiCb709otOxtEYCSvK80PtjODfXXZu8ds+Nw5kVCjqd2g==} + commander@11.1.0: resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} engines: {node: '>=16'} @@ -6165,6 +6216,9 @@ packages: js-base64@3.7.8: resolution: {integrity: sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==} + js-logger@1.6.1: + resolution: {integrity: sha512-yTgMCPXVjhmg28CuUH8CKjU+cIKL/G+zTu4Fn4lQxs8mRFH/03QTNvEFngcxfg/gRDiQAOoyCKmMTOm9ayOzXA==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -7035,6 +7089,10 @@ packages: oxc-resolver@11.8.2: resolution: {integrity: sha512-SM31gnF1l4T8YA7dkAcBhA+jc336bc8scy0Tetz6ndzGmV6c0R99SRnx6In0V5ffwvn1Isjo9I9EGSLF4xi3TA==} + p-defer@4.0.1: + resolution: {integrity: sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==} + engines: {node: '>=12'} + p-filter@2.1.0: resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} engines: {node: '>=8'} @@ -11086,6 +11144,23 @@ snapshots: '@poppinss/exception@1.2.2': {} + '@powersync/better-sqlite3@0.2.0': + dependencies: + bindings: 1.5.0 + + '@powersync/common@1.39.0': + dependencies: + js-logger: 1.6.1 + + '@powersync/node@0.11.0(@powersync/common@1.39.0)': + dependencies: + '@powersync/better-sqlite3': 0.2.0 + '@powersync/common': 1.39.0 + async-lock: 1.4.1 + bson: 6.10.4 + comlink: 4.4.2 + undici: 7.16.0 + '@protobufjs/aspromise@1.1.2': {} '@protobufjs/base64@1.1.2': {} @@ -11548,24 +11623,24 @@ snapshots: transitivePeerDependencies: - typescript - 
'@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte': 6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte': 6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) debug: 4.4.3 svelte: 5.39.4 - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) + vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 
5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) debug: 4.4.3 deepmerge: 4.3.1 magic-string: 0.30.19 svelte: 5.39.4 - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitefu: 1.1.1(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) + vitefu: 1.1.1(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) transitivePeerDependencies: - supports-color @@ -12825,9 +12900,9 @@ snapshots: transitivePeerDependencies: - supports-color - '@vitejs/plugin-vue@5.2.4(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2))': + '@vitejs/plugin-vue@5.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2))': dependencies: - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) + vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) vue: 3.5.21(typescript@5.9.2) 
'@vitest/coverage-istanbul@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': @@ -12862,14 +12937,6 @@ snapshots: optionalDependencies: vite: 6.3.6(@types/node@22.18.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - '@vitest/mocker@3.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.19 - optionalDependencies: - vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - '@vitest/pretty-format@3.2.4': dependencies: tinyrainbow: 2.0.0 @@ -13258,6 +13325,8 @@ snapshots: async-function@1.0.0: {} + async-lock@1.4.1: {} + async-sema@3.1.1: {} async@3.2.6: {} @@ -13621,6 +13690,8 @@ snapshots: colorette@2.0.20: {} + comlink@4.4.2: {} + commander@11.1.0: optional: true @@ -15424,6 +15495,8 @@ snapshots: js-base64@3.7.8: {} + js-logger@1.6.1: {} + js-tokens@4.0.0: {} js-tokens@9.0.1: {} @@ -16471,6 +16544,8 @@ snapshots: '@oxc-resolver/binding-win32-ia32-msvc': 11.8.2 '@oxc-resolver/binding-win32-x64-msvc': 11.8.2 + p-defer@4.0.1: {} + p-filter@2.1.0: dependencies: p-map: 2.1.0 @@ -18336,22 +18411,6 @@ snapshots: '@testing-library/jest-dom': 6.8.0 transitivePeerDependencies: - supports-color - optional: true - - vite-plugin-solid@2.11.8(@testing-library/jest-dom@6.8.0)(solid-js@1.9.9)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)): - dependencies: - '@babel/core': 7.28.4 - '@types/babel__core': 7.20.5 - babel-preset-solid: 1.9.9(@babel/core@7.28.4)(solid-js@1.9.9) - merge-anything: 5.1.7 - solid-js: 1.9.9 - solid-refresh: 0.6.3(solid-js@1.9.9) - vite: 
7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitefu: 1.1.1(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) - optionalDependencies: - '@testing-library/jest-dom': 6.8.0 - transitivePeerDependencies: - - supports-color vite-tsconfig-paths@5.1.4(typescript@5.9.2)(vite@6.3.6(@types/node@22.18.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)): dependencies: @@ -18437,10 +18496,6 @@ snapshots: optionalDependencies: vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitefu@1.1.1(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)): - optionalDependencies: - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1): dependencies: '@types/chai': 5.2.2 @@ -18488,7 +18543,7 @@ snapshots: dependencies: '@types/chai': 5.2.2 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + '@vitest/mocker': 3.2.4(vite@6.3.6(@types/node@22.18.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 From e88623c5f2a0dfcafe74e3f97f1b07edfbfaa9e5 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 1 Oct 2025 11:53:28 +0200 Subject: [PATCH 02/25] Add support for transactions with multiple collection types --- .../src/PendingOperationStore.ts | 7 + .../src/PowerSyncTransactor.ts | 233 +++++++++++++----- .../src/definitions.ts | 73 ++++++ 
.../powersync-db-collection/src/helpers.ts | 18 +- packages/powersync-db-collection/src/index.ts | 2 + .../powersync-db-collection/src/powersync.ts | 137 ++-------- .../tests/powersync.test.ts | 74 +++++- 7 files changed, 363 insertions(+), 181 deletions(-) create mode 100644 packages/powersync-db-collection/src/definitions.ts diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts index 9353cd03e..c804067b3 100644 --- a/packages/powersync-db-collection/src/PendingOperationStore.ts +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -3,6 +3,7 @@ import type { DiffTriggerOperation } from "@powersync/common" import type { DeferredPromise } from "p-defer" export type PendingOperation = { + tableName: string operation: DiffTriggerOperation id: string timestamp: string @@ -18,6 +19,11 @@ export type PendingOperation = { export class PendingOperationStore { private pendingOperations = new Map>() + /** + * Globally accessible PendingOperationStore + */ + static GLOBAL = new PendingOperationStore() + /** * @returns A promise which will resolve once the specified operation has been seen. 
*/ @@ -34,6 +40,7 @@ export class PendingOperationStore { for (const operation of operations) { for (const [pendingOp, deferred] of this.pendingOperations.entries()) { if ( + pendingOp.tableName == operation.tableName && pendingOp.operation == operation.operation && pendingOp.id == operation.id && pendingOp.timestamp == operation.timestamp diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 5d145e270..9ce47e908 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -1,37 +1,60 @@ import { sanitizeSQL } from "@powersync/common" import DebugModule from "debug" -import { asPowerSyncRecord } from "./helpers" +import { PendingOperationStore } from "./PendingOperationStore" +import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" -import type { Transaction } from "@tanstack/db" -import type { - PendingOperation, - PendingOperationStore, -} from "./PendingOperationStore" -import type { PowerSyncRecord } from "./helpers" +import type { PendingMutation, Transaction } from "@tanstack/db" +import type { PendingOperation } from "./PendingOperationStore" +import type { EnhancedPowerSyncCollectionConfig } from "./definitions" const debug = DebugModule.debug(`ts/db:powersync`) export type TransactorOptions = { database: AbstractPowerSyncDatabase - tableName: string - pendingOperationStore: PendingOperationStore - trackedTableName: string } /** - * Handles persisting Tanstack DB transactions to the PowerSync SQLite DB. + * Applies mutations to the PowerSync database. This method is called automatically by the collection's + * insert, update, and delete operations. You typically don't need to call this directly unless you + * have special transaction requirements. 
+ * + * @example + * ```typescript + * // Create a collection + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * + * const addTx = createTransaction({ + * autoCommit: false, + * mutationFn: async ({ transaction }) => { + * await new PowerSyncTransactor({database: db}).applyTransaction(transaction) + * }, + * }) + * + * addTx.mutate(() => { + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } + * }) + * + * await addTx.commit() + * await addTx.isPersisted.promise + * ``` + * + * @param transaction - The transaction containing mutations to apply + * @returns A promise that resolves when the mutations have been persisted to PowerSync */ export class PowerSyncTransactor> { database: AbstractPowerSyncDatabase pendingOperationStore: PendingOperationStore - tableName: string - trackedTableName: string constructor(options: TransactorOptions) { this.database = options.database - this.pendingOperationStore = options.pendingOperationStore - this.tableName = sanitizeSQL`${options.tableName}` - this.trackedTableName = sanitizeSQL`${options.trackedTableName}` + this.pendingOperationStore = PendingOperationStore.GLOBAL } /** @@ -40,38 +63,49 @@ export class PowerSyncTransactor> { async applyTransaction(transaction: Transaction) { const { mutations } = transaction + if (mutations.length == 0) { + return + } + /** + * The transaction might contain ops for different collections. + * We can do some optimizations for single collection transactions. 
+ */ + const isMixedTransaction = mutations.some( + (mutation) => mutation.collection.id !== mutations[0]?.collection.id + ) // Persist to PowerSync const { whenComplete } = await this.database.writeTransaction( async (tx) => { - for (const mutation of mutations) { + const pendingOperations: Array = [] + + for (const [index, mutation] of mutations.entries()) { + /** + * For mixed transactions we need to check every operation has been seen. + * This is since the individual tables are watched independently. + * + * For a single collection transaction, we only need to check the last operation + * has been seen. + */ + const shouldWait = isMixedTransaction || index == mutations.length - 1 switch (mutation.type) { case `insert`: - await this.handleInsert(asPowerSyncRecord(mutation.modified), tx) + pendingOperations.push( + await this.handleInsert(mutation, tx, shouldWait) + ) break case `update`: - await this.handleUpdate(asPowerSyncRecord(mutation.modified), tx) + pendingOperations.push( + await this.handleUpdate(mutation, tx, shouldWait) + ) break case `delete`: - await this.handleDelete(asPowerSyncRecord(mutation.original), tx) + pendingOperations.push( + await this.handleDelete(mutation, tx, shouldWait) + ) break } } - /** - * Fetch the last diff operation in the queue. - * We need to wait for this operation to be seen by the - * sync handler before returning from the application call. - */ - const lastDiffOp = await tx.getOptional(` - SELECT - id, operation, timestamp - FROM - ${this.trackedTableName} - ORDER BY - timestamp DESC - LIMIT 1 - `) - /** * Return a promise from the writeTransaction, without awaiting it. * This promise will resolve once the entire transaction has been @@ -79,9 +113,11 @@ export class PowerSyncTransactor> { * We return without awaiting in order to free the writeLock. */ return { - whenComplete: lastDiffOp - ? 
this.pendingOperationStore.waitFor(lastDiffOp) - : Promise.resolve(), + whenComplete: Promise.all( + pendingOperations + .filter((op) => !!op) + .map((op) => this.pendingOperationStore.waitFor(op)) + ), } } ) @@ -91,49 +127,122 @@ export class PowerSyncTransactor> { } protected async handleInsert( - mutation: PowerSyncRecord, - context: LockContext - ) { + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { debug(`insert`, mutation) - const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) - await context.execute( - ` - INSERT into ${this.tableName} + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + const keys = Object.keys(mutation.modified).map( + (key) => sanitizeSQL`${key}` + ) + + await context.execute( + ` + INSERT into ${tableName} (${keys.join(`, `)}) VALUES (${keys.map((_) => `?`).join(`, `)}) `, - Object.values(mutation) + Object.values(mutation.modified) + ) + } ) } protected async handleUpdate( - mutation: PowerSyncRecord, - context: LockContext - ) { + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { debug(`update`, mutation) - const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) - await context.execute( - ` - UPDATE ${this.tableName} + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + const keys = Object.keys(mutation.modified).map( + (key) => sanitizeSQL`${key}` + ) + await context.execute( + ` + UPDATE ${tableName} SET ${keys.map((key) => `${key} = ?`).join(`, `)} WHERE id = ? 
`, - [...Object.values(mutation), mutation.id] + [ + ...Object.values(mutation.modified), + asPowerSyncRecord(mutation.modified).id, + ] + ) + } ) } protected async handleDelete( - mutation: PowerSyncRecord, - context: LockContext - ) { - debug(`delete`, mutation) - await context.execute( - ` - DELETE FROM ${this.tableName} WHERE id = ? + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { + debug(`update`, mutation) + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + await context.execute( + ` + DELETE FROM ${tableName} WHERE id = ? `, - [mutation.id] + [asPowerSyncRecord(mutation.original).id] + ) + } + ) + } + + /** + * Helper function which wraps a persistence operation by: + * - Fetching the mutation's collection's SQLite table details + * - Executing the mutation + * - Returning the last pending diff op if required + */ + protected async handleOperationWithCompletion( + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean, + handler: (tableName: string, mutation: PendingMutation) => Promise + ): Promise { + const { tableName, trackedTableName } = ( + mutation.collection.config as EnhancedPowerSyncCollectionConfig + ).utils.getMeta() + + if (!tableName) { + throw new Error(`Could not get tableName from mutation's collection config. 
+ The provided mutation might not have originated from PowerSync.`) + } + + await handler(sanitizeSQL`${tableName}`, mutation) + + if (!waitForCompletion) { + return null + } + + // Need to get the operation in order to wait for it + const diffOperation = await context.get<{ id: string; timestamp: string }>( + sanitizeSQL`SELECT id, timestamp FROM ${trackedTableName} ORDER BY timestamp DESC LIMIT 1` ) + return { + tableName, + id: diffOperation.id, + operation: mapOperationToPowerSync(mutation.type), + timestamp: diffOperation.timestamp, + } } } diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts new file mode 100644 index 000000000..7af17a065 --- /dev/null +++ b/packages/powersync-db-collection/src/definitions.ts @@ -0,0 +1,73 @@ +import type { AbstractPowerSyncDatabase } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" + +/** + * Configuration interface for PowerSync collection options + * @template T - The type of items in the collection + * @template TSchema - The schema type for validation + */ +/** + * Configuration options for creating a PowerSync collection. 
+ * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export type PowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = Omit< + BaseCollectionConfig, + `onInsert` | `onUpdate` | `onDelete` | `getKey` +> & { + /** The name of the table in PowerSync database */ + tableName: string + /** The PowerSync database instance */ + database: AbstractPowerSyncDatabase +} + +export type PowerSyncCollectionMeta = { + /** + * The SQLite table representing the collection. + */ + tableName: string + /** + * The internal table used to track diff for the collection. 
+ */ + trackedTableName: string +} + +export type EnhancedPowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema +} + +export type PowerSyncCollectionUtils = { + getMeta: () => PowerSyncCollectionMeta +} diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 29c42b7a9..13ace1fb6 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -16,7 +16,7 @@ export function asPowerSyncRecord(record: any): PowerSyncRecord { } /** - * Maps Tanstack DB operations to {@link DiffTriggerOperation} + * Maps {@link DiffTriggerOperation} to TanstackDB operations */ export function mapOperation(operation: DiffTriggerOperation) { switch (operation) { @@ -28,3 +28,19 @@ export function mapOperation(operation: DiffTriggerOperation) { return `delete` } } + +/** + * Maps TanstackDB operations to {@link DiffTriggerOperation} + */ +export function mapOperationToPowerSync(operation: string) { + switch (operation) { + case `insert`: + return DiffTriggerOperation.INSERT + case `update`: + return DiffTriggerOperation.UPDATE + case `delete`: + return DiffTriggerOperation.DELETE + default: + throw new Error(`Unknown operation ${operation} received`) + } +} diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 30e35e857..6c8111f4c 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1 +1,3 @@ +export * from "./definitions" export * from "./powersync" +export * from "./PowerSyncTransactor" diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index b204c04bf..b67c672ee 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ 
b/packages/powersync-db-collection/src/powersync.ts @@ -1,105 +1,20 @@ import { DiffTriggerOperation } from "@powersync/common" +import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" -import { mapOperation } from "./helpers" +import type { + EnhancedPowerSyncCollectionConfig, + PowerSyncCollectionConfig, + PowerSyncCollectionUtils, +} from "./definitions" import type { PendingOperation } from "./PendingOperationStore" import type { - BaseCollectionConfig, CollectionConfig, InferSchemaOutput, SyncConfig, - Transaction, } from "@tanstack/db" -import type { - AbstractPowerSyncDatabase, - TriggerDiffRecord, -} from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" - -/** - * Configuration interface for PowerSync collection options - * @template T - The type of items in the collection - * @template TSchema - The schema type for validation - */ -/** - * Configuration options for creating a PowerSync collection. 
- * - * @example - * ```typescript - * const APP_SCHEMA = new Schema({ - * documents: new Table({ - * name: column.text, - * }), - * }) - * - * type Document = (typeof APP_SCHEMA)["types"]["documents"] - * - * const db = new PowerSyncDatabase({ - * database: { - * dbFilename: "test.sqlite", - * }, - * schema: APP_SCHEMA, - * }) - * - * const collection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "documents", - * }) - * ) - * ``` - */ -export type PowerSyncCollectionConfig< - T extends object = Record, - TSchema extends StandardSchemaV1 = never, -> = Omit< - BaseCollectionConfig, - `onInsert` | `onUpdate` | `onDelete` | `getKey` -> & { - /** The name of the table in PowerSync database */ - tableName: string - /** The PowerSync database instance */ - database: AbstractPowerSyncDatabase -} - -export type PowerSyncCollectionUtils = { - /** - * Applies mutations to the PowerSync database. This method is called automatically by the collection's - * insert, update, and delete operations. You typically don't need to call this directly unless you - * have special transaction requirements. 
- * - * @example - * ```typescript - * // Create a collection - * const collection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "documents", - * }) - * ) - * - * const addTx = createTransaction({ - * autoCommit: false, - * mutationFn: async ({ transaction }) => { - * await collection.utils.mutateTransaction(transaction) - * }, - * }) - * - * addTx.mutate(() => { - * for (let i = 0; i < 5; i++) { - * collection.insert({ id: randomUUID(), name: `tx-${i}` }) - * } - * }) - * - * await addTx.commit() - * await addTx.isPersisted.promise - * ``` - * - * @param transaction - The transaction containing mutations to apply - * @returns A promise that resolves when the mutations have been persisted to PowerSync - */ - mutateTransaction: (transaction: Transaction) => Promise -} +import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -184,14 +99,7 @@ export function powerSyncCollectionOptions< TSchema extends StandardSchemaV1 = never, >( config: PowerSyncCollectionConfig -): CollectionConfig & { - id?: string - utils: PowerSyncCollectionUtils - schema?: TSchema -} { - type Row = Record - type Key = string // we always use uuids for keys - +): EnhancedPowerSyncCollectionConfig { const { database, tableName, ...restConfig } = config /** @@ -204,14 +112,11 @@ export function powerSyncCollectionOptions< * complete to the caller, the in-memory state is already * consistent with the database. */ - const pendingOperationStore = new PendingOperationStore() + const pendingOperationStore = PendingOperationStore.GLOBAL const trackedTableName = `__${tableName}_tracking` const transactor = new PowerSyncTransactor({ database, - pendingOperationStore, - tableName, - trackedTableName, }) /** @@ -220,9 +125,8 @@ export function powerSyncCollectionOptions< * and the in-memory tanstack-db collection. * It is not about sync between a client and a server! 
*/ - type SyncParams = Parameters[`sync`]>[0] - const sync: SyncConfig = { - sync: async (params: SyncParams) => { + const sync: SyncConfig = { + sync: async (params) => { const { begin, write, commit, markReady } = params // Manually create a tracking operation for optimization purposes @@ -257,6 +161,7 @@ export function powerSyncCollectionOptions< id, operation, timestamp, + tableName, }) } @@ -286,7 +191,7 @@ export function powerSyncCollectionOptions< hooks: { beforeCreate: async (context) => { begin() - for (const row of await context.getAll>( + for (const row of await context.getAll( `SELECT * FROM ${tableName}` )) { write({ @@ -309,9 +214,9 @@ export function powerSyncCollectionOptions< getSyncMetadata: undefined, } - const getKey = (record: Record) => record.id as string + const getKey = (record: T) => asPowerSyncRecord(record).id - return { + const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, getKey, sync, @@ -328,13 +233,11 @@ export function powerSyncCollectionOptions< return await transactor.applyTransaction(params.transaction) }, utils: { - mutateTransaction: async (transaction: Transaction) => { - return await transactor.applyTransaction(transaction) - }, + getMeta: () => ({ + tableName, + trackedTableName, + }), }, - } as CollectionConfig & { - id?: string - utils: PowerSyncCollectionUtils - schema?: TSchema } + return outputConfig } diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 4e9676ef0..86e4bcf42 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -9,15 +9,20 @@ import { import { createCollection, createTransaction } from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" +import { PowerSyncTransactor } from "../src/PowerSyncTransactor" import type { 
AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ + users: new Table({ + name: column.text, + }), documents: new Table({ name: column.text, }), }) type Document = (typeof APP_SCHEMA)[`types`][`documents`] +type User = (typeof APP_SCHEMA)[`types`][`users`] describe(`PowerSync Integration`, () => { async function createDatabase() { @@ -221,7 +226,9 @@ describe(`PowerSync Integration`, () => { const addTx = createTransaction({ autoCommit: false, mutationFn: async ({ transaction }) => { - await collection.utils.mutateTransaction(transaction) + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) }, }) @@ -254,4 +261,69 @@ describe(`PowerSync Integration`, () => { .every((crudEntry) => crudEntry.transactionId == lastTransactionId) ).true }) + + it(`should handle transactions with multiple collections`, async () => { + const db = await createDatabase() + await createTestData(db) + + const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + const usersCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `users`, + }) + ) + + await documentsCollection.stateWhenReady() + await usersCollection.stateWhenReady() + + expect(documentsCollection.size).toBe(3) + expect(usersCollection.size).toBe(0) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) + usersCollection.insert({ id: randomUUID(), name: `user` }) + } + }) + + await addTx.commit() + await addTx.isPersisted.promise + + expect(documentsCollection.size).toBe(8) + expect(usersCollection.size).toBe(5) + + // fetch the ps_crud items + // There should be a crud entries for this + const 
_crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. + */ + expect( + crudEntries + .reverse() + .slice(0, 10) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) }) From 352829ea0ea740989db7d8731a65a43b3b18f234 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 11:02:27 +0200 Subject: [PATCH 03/25] Optimize transaction waiting --- .../src/PowerSyncTransactor.ts | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 9ce47e908..fade901c1 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -70,9 +70,18 @@ export class PowerSyncTransactor> { * The transaction might contain ops for different collections. * We can do some optimizations for single collection transactions. 
*/ - const isMixedTransaction = mutations.some( - (mutation) => mutation.collection.id !== mutations[0]?.collection.id + const mutationsCollections = mutations.map( + (mutation) => mutation.collection.id ) + const collectionIds = Array.from(new Set(mutationsCollections)) + const lastCollectionMutationIndexes = new Map() + for (const collectionId of collectionIds) { + lastCollectionMutationIndexes.set( + collectionId, + mutationsCollections.lastIndexOf(collectionId) + ) + } + // Persist to PowerSync const { whenComplete } = await this.database.writeTransaction( async (tx) => { @@ -80,13 +89,11 @@ export class PowerSyncTransactor> { for (const [index, mutation] of mutations.entries()) { /** - * For mixed transactions we need to check every operation has been seen. - * This is since the individual tables are watched independently. - * - * For a single collection transaction, we only need to check the last operation - * has been seen. + * Each collection processes events independently. We need to make sure the + * last operation for each collection has been seen. 
*/ - const shouldWait = isMixedTransaction || index == mutations.length - 1 + const shouldWait = + index == lastCollectionMutationIndexes.get(mutation.collection.id) switch (mutation.type) { case `insert`: pendingOperations.push( From 1c75d3de3aab3356856b51f6a5758854411eb2d1 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 11:02:42 +0200 Subject: [PATCH 04/25] Improve test stability --- packages/powersync-db-collection/src/powersync.ts | 15 ++++++++------- .../tests/powersync.test.ts | 13 +++++++++---- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index b67c672ee..f98985eef 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -2,19 +2,19 @@ import { DiffTriggerOperation } from "@powersync/common" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" +import type { TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -207,6 +207,7 @@ export function powerSyncCollectionOptions< return () => { abortController.abort() + // We unfortunately cannot await this disposeTracking() } }, diff --git 
a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 86e4bcf42..3e66a0411 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -1,4 +1,5 @@ import { randomUUID } from "node:crypto" +import { tmpdir } from "node:os" import { CrudEntry, PowerSyncDatabase, @@ -28,15 +29,20 @@ describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ database: { - dbFilename: `test.sqlite`, + dbFilename: `test-${randomUUID()}.sqlite`, + dbLocation: tmpdir(), }, schema: APP_SCHEMA, }) onTestFinished(async () => { - await db.disconnectAndClear() + /** + * We don't clear the DB here since that would cause deletes + * which would trigger collection updates while the DB is closing. + * We currently can't await the async cleanup of TanStack collections (since that method is not async). + * So we use unique temporary databases for each test. + */ await db.close() }) - await db.disconnectAndClear() return db } @@ -84,7 +90,6 @@ describe(`PowerSync Integration`, () => { tableName: `documents`, }) ) - await collection.stateWhenReady() // Verify the collection state contains our items From a892accc24edfa732a5913e57ced64a7f715e4c5 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 11:50:06 +0200 Subject: [PATCH 05/25] Improve cleanup behaviour --- .../powersync-db-collection/src/powersync.ts | 155 +++--- .../tests/powersync.test.ts | 483 ++++++++++-------- 2 files changed, 368 insertions(+), 270 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index f98985eef..694424077 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -113,7 +113,12 @@ export function powerSyncCollectionOptions< * consistent with the database. 
*/ const pendingOperationStore = PendingOperationStore.GLOBAL - const trackedTableName = `__${tableName}_tracking` + // Keep the tracked table unique in case of multiple tabs. + const trackedTableName = `__${tableName}_tracking_${Math.floor( + Math.random() * 0xffffffff + ) + .toString(16) + .padStart(8, `0`)}` const transactor = new PowerSyncTransactor({ database, @@ -126,89 +131,101 @@ export function powerSyncCollectionOptions< * It is not about sync between a client and a server! */ const sync: SyncConfig = { - sync: async (params) => { + sync: (params) => { const { begin, write, commit, markReady } = params // Manually create a tracking operation for optimization purposes const abortController = new AbortController() - database.onChangeWithCallback( - { - onChange: async () => { - await database.writeTransaction(async (context) => { - begin() - const operations = await context.getAll( - `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` - ) - const pendingOperations: Array = [] - - for (const op of operations) { - const { id, operation, timestamp, value } = op - const parsedValue = { - id, - ...JSON.parse(value), + // The sync function needs to be synchronous + async function start() { + database.onChangeWithCallback( + { + onChange: async () => { + await database.writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] + + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? 
{ id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + tableName, + }) } - const parsedPreviousValue = - op.operation == DiffTriggerOperation.UPDATE - ? { id, ...JSON.parse(op.previous_value) } - : null + + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) + + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + }, + }, + { + signal: abortController.signal, + triggerImmediate: false, + tables: [trackedTableName], + } + ) + + const disposeTracking = await database.triggers.createDiffTrigger({ + source: tableName, + destination: trackedTableName, + when: { + [DiffTriggerOperation.INSERT]: `TRUE`, + [DiffTriggerOperation.UPDATE]: `TRUE`, + [DiffTriggerOperation.DELETE]: `TRUE`, + }, + hooks: { + beforeCreate: async (context) => { + begin() + for (const row of await context.getAll( + `SELECT * FROM ${tableName}` + )) { write({ - type: mapOperation(operation), - value: parsedValue, - previousValue: parsedPreviousValue, - }) - pendingOperations.push({ - id, - operation, - timestamp, - tableName, + type: `insert`, + value: row, }) } - - // clear the current operations - await context.execute(`DELETE FROM ${trackedTableName}`) - commit() - pendingOperationStore.resolvePendingFor(pendingOperations) - }) + markReady() + }, }, - }, - { - signal: abortController.signal, - triggerImmediate: false, - tables: [trackedTableName], + }) + + // If the abort controller was aborted while processing the request above + if (abortController.signal.aborted) { + await disposeTracking() + } else { + abortController.signal.addEventListener(`abort`, () => { + disposeTracking() + }) } - ) - - const disposeTracking = await database.triggers.createDiffTrigger({ - source: tableName, - destination: trackedTableName, - when: { - [DiffTriggerOperation.INSERT]: `TRUE`, - 
[DiffTriggerOperation.UPDATE]: `TRUE`, - [DiffTriggerOperation.DELETE]: `TRUE`, - }, - hooks: { - beforeCreate: async (context) => { - begin() - for (const row of await context.getAll( - `SELECT * FROM ${tableName}` - )) { - write({ - type: `insert`, - value: row, - }) - } - commit() - markReady() - }, - }, - }) + } + + start() return () => { abortController.abort() - // We unfortunately cannot await this - disposeTracking() } }, // Expose the getSyncMetadata function diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 3e66a0411..fdaecb148 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -29,18 +29,13 @@ describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ database: { - dbFilename: `test-${randomUUID()}.sqlite`, + dbFilename: `test.sqlite`, dbLocation: tmpdir(), }, schema: APP_SCHEMA, }) onTestFinished(async () => { - /** - * We don't clear the DB here since that would cause deletes - * which would trigger collection updates while the DB is closing. - * We currently can't await the async cleanup of TanStack collections (since that method is not async). - * So we use unique temporary databases for each test. 
- */ + await db.disconnectAndClear() await db.close() }) return db @@ -67,6 +62,7 @@ describe(`PowerSync Integration`, () => { tableName: `documents`, }) ) + onTestFinished(() => collection.cleanup()) await collection.stateWhenReady() @@ -78,257 +74,342 @@ describe(`PowerSync Integration`, () => { `three`, ]) }) - }) - it(`should update when data syncs`, async () => { - const db = await createDatabase() - await createTestData(db) + it(`should update when data syncs`, async () => { + const db = await createDatabase() + await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - await collection.stateWhenReady() + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) - // Verify the collection state contains our items - expect(collection.size).toBe(3) + await collection.stateWhenReady() - // Make an update, simulates a sync from another client - await db.execute(` + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + // Make an update, simulates a sync from another client + await db.execute(` INSERT into documents (id, name) VALUES (uuid(), 'four') `) - // The collection should update - await vi.waitFor( - () => { - expect(collection.size).toBe(4) - expect(collection.toArray.map((entry) => entry.name)).deep.equals([ - `one`, - `two`, - `three`, - `four`, - ]) - }, - { timeout: 1000 } - ) + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(4) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) - await db.execute(` + await db.execute(` DELETE from documents WHERE name = 'two' `) - // The collection should update - await vi.waitFor( - () => { - expect(collection.size).toBe(3) - 
expect(collection.toArray.map((entry) => entry.name)).deep.equals([ - `one`, - `three`, - `four`, - ]) - }, - { timeout: 1000 } - ) + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) - await db.execute(` + await db.execute(` UPDATE documents SET name = 'updated' WHERE name = 'one' `) - // The collection should update - await vi.waitFor( - () => { - expect(collection.size).toBe(3) - expect(collection.toArray.map((entry) => entry.name)).deep.equals([ - `updated`, - `three`, - `four`, - ]) - }, - { timeout: 1000 } - ) - }) + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `updated`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + }) - it(`should propagate collection mutations to PowerSync`, async () => { - const db = await createDatabase() - await createTestData(db) + it(`should propagate collection mutations to PowerSync`, async () => { + const db = await createDatabase() + await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) - await collection.stateWhenReady() + await collection.stateWhenReady() - // Verify the collection state contains our items - expect(collection.size).toBe(3) + // Verify the collection state contains our items + expect(collection.size).toBe(3) - const id = randomUUID() - const tx = collection.insert({ - id, - name: `new`, - }) + const id = randomUUID() + const tx = collection.insert({ + id, + name: `new`, + }) - // The insert should optimistically update the collection - const newDoc = 
collection.get(id) - expect(newDoc).toBeDefined() - expect(newDoc!.name).toBe(`new`) + // The insert should optimistically update the collection + const newDoc = collection.get(id) + expect(newDoc).toBeDefined() + expect(newDoc!.name).toBe(`new`) - await tx.isPersisted.promise - // The item should now be present in PowerSync - // We should also have patched it back in to Tanstack DB (removing the optimistic state) + await tx.isPersisted.promise + // The item should now be present in PowerSync + // We should also have patched it back in to Tanstack DB (removing the optimistic state) - // Now do an update - await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted - .promise + // Now do an update + await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted + .promise - const updatedDoc = collection.get(id) - expect(updatedDoc).toBeDefined() - expect(updatedDoc!.name).toBe(`updatedNew`) + const updatedDoc = collection.get(id) + expect(updatedDoc).toBeDefined() + expect(updatedDoc!.name).toBe(`updatedNew`) - await collection.delete(id).isPersisted.promise + await collection.delete(id).isPersisted.promise - // There should be a crud entries for this - const _crudEntries = await db.getAll(` + // There should be a crud entries for this + const _crudEntries = await db.getAll(` SELECT * FROM ps_crud ORDER BY id`) - const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) - - expect(crudEntries.length).toBe(6) - // We can only group transactions for similar operations - expect(crudEntries.map((e) => e.op)).toEqual([ - `PUT`, - `PUT`, - `PUT`, - `PUT`, - `PATCH`, - `DELETE`, - ]) - }) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + expect(crudEntries.length).toBe(6) + // We can only group transactions for similar operations + expect(crudEntries.map((e) => e.op)).toEqual([ + `PUT`, + `PUT`, + `PUT`, + `PUT`, + `PATCH`, + `DELETE`, + ]) + }) - it(`should handle transactions`, async () => { - const db = 
await createDatabase() - await createTestData(db) + it(`should handle transactions`, async () => { + const db = await createDatabase() + await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + expect(collection.size).toBe(3) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, }) - ) - await collection.stateWhenReady() + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + collection.insert({ id: randomUUID(), name: `tx-${i}` }) + } + }) - expect(collection.size).toBe(3) + await addTx.commit() + await addTx.isPersisted.promise - const addTx = createTransaction({ - autoCommit: false, - mutationFn: async ({ transaction }) => { - await new PowerSyncTransactor({ database: db }).applyTransaction( - transaction - ) - }, - }) + expect(collection.size).toBe(8) - addTx.mutate(() => { - for (let i = 0; i < 5; i++) { - collection.insert({ id: randomUUID(), name: `tx-${i}` }) - } + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = + crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. 
+ */ + expect( + crudEntries + .reverse() + .slice(0, 5) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true }) - await addTx.commit() - await addTx.isPersisted.promise + it(`should handle transactions with multiple collections`, async () => { + const db = await createDatabase() + await createTestData(db) - expect(collection.size).toBe(8) + const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => documentsCollection.cleanup()) - // fetch the ps_crud items - // There should be a crud entries for this - const _crudEntries = await db.getAll(` - SELECT * FROM ps_crud ORDER BY id`) - const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) - - const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId - /** - * The last items, created in the same transaction, should be in the same - * PowerSync transaction. - */ - expect( - crudEntries - .reverse() - .slice(0, 5) - .every((crudEntry) => crudEntry.transactionId == lastTransactionId) - ).true - }) + const usersCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `users`, + }) + ) + onTestFinished(() => usersCollection.cleanup()) - it(`should handle transactions with multiple collections`, async () => { - const db = await createDatabase() - await createTestData(db) + await documentsCollection.stateWhenReady() + await usersCollection.stateWhenReady() - const documentsCollection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, + expect(documentsCollection.size).toBe(3) + expect(usersCollection.size).toBe(0) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, }) - ) - const usersCollection = createCollection( - powerSyncCollectionOptions({ - database: db, 
- tableName: `users`, + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) + usersCollection.insert({ id: randomUUID(), name: `user` }) + } }) - ) - await documentsCollection.stateWhenReady() - await usersCollection.stateWhenReady() + await addTx.commit() + await addTx.isPersisted.promise - expect(documentsCollection.size).toBe(3) - expect(usersCollection.size).toBe(0) + expect(documentsCollection.size).toBe(8) + expect(usersCollection.size).toBe(5) - const addTx = createTransaction({ - autoCommit: false, - mutationFn: async ({ transaction }) => { - await new PowerSyncTransactor({ database: db }).applyTransaction( - transaction - ) - }, + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = + crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. 
+ */ + expect( + crudEntries + .reverse() + .slice(0, 10) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) + }) + + describe(`Multiple Clients`, async () => { + it(`should sync updates between multiple clients`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collectionA = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collectionA.cleanup()) + await collectionA.stateWhenReady() + + const collectionB = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collectionB.cleanup()) + await collectionB.stateWhenReady() + + await createTestData(db) + + // Both collections should have the data present after insertion + await vi.waitFor( + () => { + expect(collectionA.size).eq(3) + expect(collectionB.size).eq(3) + }, + { timeout: 1000 } + ) }) + }) - addTx.mutate(() => { - for (let i = 0; i < 5; i++) { - documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) - usersCollection.insert({ id: randomUUID(), name: `user` }) + describe(`Lifecycle`, async () => { + it(`should cleanup resources`, async () => { + const db = await createDatabase() + const collectionOptions = powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + + const meta = collectionOptions.utils.getMeta() + + const tableExists = async (): Promise => { + const result = await db.writeLock(async (tx) => { + return tx.get<{ count: number }>( + ` + SELECT COUNT(*) as count + FROM sqlite_temp_master + WHERE type='table' AND name = ? 
+ `, + [meta.trackedTableName] + ) + }) + return result.count > 0 } - }) - await addTx.commit() - await addTx.isPersisted.promise + const collection = createCollection(collectionOptions) + await collection.stateWhenReady() + expect(await tableExists()).true - expect(documentsCollection.size).toBe(8) - expect(usersCollection.size).toBe(5) + await collection.cleanup() - // fetch the ps_crud items - // There should be a crud entries for this - const _crudEntries = await db.getAll(` - SELECT * FROM ps_crud ORDER BY id`) - const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) - - const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId - /** - * The last items, created in the same transaction, should be in the same - * PowerSync transaction. - */ - expect( - crudEntries - .reverse() - .slice(0, 10) - .every((crudEntry) => crudEntry.transactionId == lastTransactionId) - ).true + // It seems that even though `cleanup` is async, the sync disposer cannot be async + // We wait for the table to be deleted + await vi.waitFor( + async () => { + expect(await tableExists()).false + }, + { timeout: 1000 } + ) + }) }) }) From 7d9ff7341693dc229e9e2c4d031e2fce650239be Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:00:52 +0200 Subject: [PATCH 06/25] Add rollback test --- .../tests/powersync.test.ts | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index fdaecb148..66e3d39ea 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -336,6 +336,55 @@ describe(`PowerSync Integration`, () => { }) }) + describe(`General use`, async () => { + it(`should rollback transactions on error`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collection = 
createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + expect(collection.size).eq(0) + const id = randomUUID() + // Attempt to insert invalid data + // We can only do this since we aren't using schema validation here + addTx.mutate(() => { + collection.insert({ + id, + name: new Error() as unknown as string, // This will cause a SQL error eventually + }) + }) + + // This should be present in the optimisic state, but should be reverted when attempting to persist + expect(collection.size).eq(1) + expect((collection.get(id)?.name as any) instanceof Error).true + + try { + await addTx.commit() + await addTx.isPersisted.promise + expect.fail(`Should have thrown an error`) + } catch (error) { + expect(error).toBeDefined() + // The collection should be in a clean state + expect(collection.size).toBe(0) + } + }) + }) + describe(`Multiple Clients`, async () => { it(`should sync updates between multiple clients`, async () => { const db = await createDatabase() From d5b3d9995e2e00a67e2658b73911432d94ffd4ee Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:02:27 +0200 Subject: [PATCH 07/25] update dependencies --- packages/powersync-db-collection/package.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 9fa1f15be..5588491e4 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -3,14 +3,17 @@ "description": "PowerSync collection for TanStack DB", "version": "0.0.0", "dependencies": { - "@powersync/common": "^1.39.0", "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", 
"@tanstack/store": "^0.7.7", "debug": "^4.4.3", "p-defer": "^4.0.1" }, + "peerDependencies": { + "@powersync/common": "^1.39.0" + }, "devDependencies": { + "@powersync/common": "^1.39.0", "@powersync/better-sqlite3": "^0.2.0", "@powersync/node": "^0.11.0", "@types/debug": "^4.1.12", From cc42e946d4403aa275f371960d78a68d9319b064 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:37:13 +0200 Subject: [PATCH 08/25] Add live query test --- .../tests/powersync.test.ts | 62 ++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 66e3d39ea..e3c82cf26 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -7,7 +7,12 @@ import { Table, column, } from "@powersync/node" -import { createCollection, createTransaction } from "@tanstack/db" +import { + createCollection, + createTransaction, + eq, + liveQueryCollectionOptions, +} from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" @@ -383,6 +388,61 @@ describe(`PowerSync Integration`, () => { expect(collection.size).toBe(0) } }) + + it(`should work with live queries`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + const liveDocuments = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ document: collection }) + .where(({ document }) => eq(document.name, `book`)) + .select(({ document }) => ({ + id: document.id, + name: document.name, + })), + }) + ) + + 
expect(liveDocuments.size).eq(0) + + const bookNames = new Set() + + liveDocuments.subscribeChanges((changes) => { + changes + .map((change) => change.value.name) + .forEach((change) => bookNames.add(change!)) + }) + + await collection.insert({ + id: randomUUID(), + name: `notabook`, + }).isPersisted.promise + await collection.insert({ + id: randomUUID(), + name: `book`, + }).isPersisted.promise + + expect(collection.size).eq(2) + await vi.waitFor( + () => { + expect(Array.from(bookNames)).deep.equals([`book`]) + }, + { timeout: 1000 } + ) + }) }) describe(`Multiple Clients`, async () => { From d1de549884315d666b6a68911a5c6ce0e683d8fc Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:45:52 +0200 Subject: [PATCH 09/25] Add docs for PowerSync collection --- docs/collections/powersync-collection.md | 175 +++++++++++++++++++++++ 1 file changed, 175 insertions(+) create mode 100644 docs/collections/powersync-collection.md diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md new file mode 100644 index 000000000..21aef4f30 --- /dev/null +++ b/docs/collections/powersync-collection.md @@ -0,0 +1,175 @@ +--- +title: PowerSync Collection +--- + +# PowerSync Collection + +PowerSync collections provide seamless integration between TanStack DB and [PowerSync](https://powersync.com), enabling automatic synchronization between your in-memory TanStack DB collections and PowerSync's SQLite database. This gives you offline-ready persistence, real-time sync capabilities, and powerful conflict resolution. 
+ +## Overview + +The `@tanstack/powersync-db-collection` package allows you to create collections that: + +- Automatically mirror the state of an underlying PowerSync SQLite database +- Reactively update when PowerSync records change +- Support optimistic mutations with rollback on error +- Provide persistence handlers to keep PowerSync in sync with TanStack DB transactions +- Use PowerSync's efficient SQLite-based storage engine +- Work with PowerSync's real-time sync features for offline-first scenarios +- Leverage PowerSync's built-in conflict resolution and data consistency guarantees +- Enable real-time synchronization with PostgreSQL, MongoDB and MySQL backends + +## 1. Installation + +Install the PowerSync collection package along with your preferred framework integration. +PowerSync currently works with Web, React Native and Node.js. The examples below use the Web SDK. +See the PowerSync quickstart [docs](https://docs.powersync.com/installation/quickstart-guide) for more details. + +```bash +npm install @tanstack/powersync-db-collection @powersync/web @journeyapps/wa-sqlite +``` + +### 2. Create a PowerSync Database and Schema + +```ts +import { Schema, Table, column } from "@powersync/web" + +// Define your schema +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + content: column.text, + created_at: column.text, + updated_at: column.text, + }), +}) + +type Document = (typeof APP_SCHEMA)["types"]["documents"] + +// Initialize PowerSync database +const db = new PowerSyncDatabase({ + database: { + dbFilename: "app.sqlite", + }, + schema: APP_SCHEMA, +}) +``` + +### 3. (optional) Configure Sync with a Backend + +```ts +import { + AbstractPowerSyncDatabase, + PowerSyncBackendConnector, + PowerSyncCredentials, +} from "@powersync/web" + +// TODO implement your logic here +class Connector implements PowerSyncBackendConnector { + fetchCredentials: () => Promise + + /** Upload local changes to the app backend. 
+ * + * Use {@link AbstractPowerSyncDatabase.getCrudBatch} to get a batch of changes to upload. + * + * Any thrown errors will result in a retry after the configured wait period (default: 5 seconds). + */ + uploadData: (database: AbstractPowerSyncDatabase) => Promise<void> +} + +// Configure the client to connect to a PowerSync service and your backend +db.connect(new Connector()) +``` + +### 4. Create a TanStack DB Collection + +```ts +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: "documents", + }) +) +``` + +## Features + +### Offline-First + +PowerSync collections are offline-first by default. All data is stored locally in a SQLite database, allowing your app to work without an internet connection. Changes are automatically synced when connectivity is restored. + +### Real-Time Sync + +When connected to a PowerSync backend, changes are automatically synchronized in real-time across all connected clients. The sync process handles: + +- Bi-directional sync with the server +- Conflict resolution +- Queue management for offline changes +- Automatic retries on connection loss + +### Optimistic Updates + +Updates to the collection are applied optimistically to the local state first, then synchronized with PowerSync and the backend. If an error occurs during sync, the changes are automatically rolled back. 
+ +## Configuration Options + +The `powerSyncCollectionOptions` function accepts the following options: + +```ts +interface PowerSyncCollectionConfig { + database: PowerSyncDatabase // PowerSync database instance + tableName: string // Name of the table in PowerSync + schema?: Schema // Optional schema for validation +} +``` + +## Advanced Transactions + +When you need more control over transaction handling, such as batching multiple operations or handling complex transaction scenarios, you can use PowerSync's transaction system directly with TanStack DB transactions. + +```ts +import { createTransaction } from "@tanstack/react-db" +import { PowerSyncTransactor } from "@tanstack/powersync-db-collection" + +// Create a transaction that won't auto-commit +const batchTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + // Use PowerSyncTransactor to apply the transaction to PowerSync + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, +}) + +// Perform multiple operations in the transaction +batchTx.mutate(() => { + // Add multiple documents in a single transaction + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ + id: crypto.randomUUID(), + name: `Document ${i}`, + content: `Content ${i}`, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }) + } +}) + +// Commit the transaction +await batchTx.commit() + +// Wait for the changes to be persisted +await batchTx.isPersisted.promise +``` + +This approach allows you to: + +- Batch multiple operations into a single transaction +- Control when the transaction is committed +- Ensure all operations are atomic +- Wait for persistence confirmation +- Handle complex transaction scenarios From ccba6ef234ee73b00631176cf543d4f102e7430c Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:57:23 +0200 Subject: [PATCH 10/25] Add Changeset --- .changeset/dark-items-dig.md | 5 +++++ 1 file changed, 5 
insertions(+) create mode 100644 .changeset/dark-items-dig.md diff --git a/.changeset/dark-items-dig.md b/.changeset/dark-items-dig.md new file mode 100644 index 000000000..d11921c5e --- /dev/null +++ b/.changeset/dark-items-dig.md @@ -0,0 +1,5 @@ +--- +"@tanstack/powersync-db-collection": minor +--- + +Initial Release From c887d90ed5b08e60520b42cbcd06f36f26ca1205 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 15:04:03 +0200 Subject: [PATCH 11/25] Added schema conversion and validation --- docs/collections/powersync-collection.md | 27 +++ packages/powersync-db-collection/src/index.ts | 1 + .../powersync-db-collection/src/schema.ts | 197 +++++++++++++++++ .../tests/powersync.test.ts | 44 +++- .../tests/schema.test.ts | 198 ++++++++++++++++++ pnpm-lock.yaml | 6 +- 6 files changed, 469 insertions(+), 4 deletions(-) create mode 100644 packages/powersync-db-collection/src/schema.ts create mode 100644 packages/powersync-db-collection/tests/schema.test.ts diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 21aef4f30..0274a838c 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -83,6 +83,10 @@ db.connect(new Connector()) ### 4. Create a TanStack DB Collection +There are two ways to create a collection: using type inference or using schema validation. 
+ +#### Option 1: Using Type Inference + ```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" @@ -95,6 +99,29 @@ const documentsCollection = createCollection( ) ``` +#### Option 2: Using Schema Validation + +```ts +import { createCollection } from "@tanstack/react-db" +import { + powerSyncCollectionOptions, + convertPowerSyncSchemaToSpecs, +} from "@tanstack/powersync-db-collection" + +// Convert PowerSync schema to TanStack DB schema +const schemas = convertPowerSyncSchemaToSpecs(APP_SCHEMA) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: "documents", + schema: schemas.documents, // Use schema for runtime type validation + }) +) +``` + +With schema validation, the collection will validate all inputs at runtime to ensure they match the PowerSync schema types. This provides an extra layer of type safety beyond TypeScript's compile-time checks. + ## Features ### Offline-First diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 6c8111f4c..152f09076 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1,3 +1,4 @@ export * from "./definitions" export * from "./powersync" export * from "./PowerSyncTransactor" +export * from "./schema" diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts new file mode 100644 index 000000000..dc894bf58 --- /dev/null +++ b/packages/powersync-db-collection/src/schema.ts @@ -0,0 +1,197 @@ +import { ColumnType } from "@powersync/common" +import type { + ColumnsType, + ExtractColumnValueType, + Schema, + Table, +} from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +/** + * Utility type that extracts the typed structure of a table based on its column definitions. 
+ * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. + * + * @template Columns - The ColumnsType definition containing column configurations + * @example + * ```typescript + * const table = new Table({ + * name: column.text, + * age: column.integer + * }) + * type TableType = ExtractedTable + * // Results in: { name: string | null, age: number | null } + * ``` + */ +type ExtractedTable = { + [K in keyof Columns]: ExtractColumnValueType +} & { + id: string +} + +/** + * Converts a PowerSync Table instance to a StandardSchemaV1 schema. + * Creates a schema that validates the structure and types of table records + * according to the PowerSync table definition. + * + * @template Columns - The ColumnsType definition containing column configurations + * @param table - The PowerSync Table instance to convert + * @returns A StandardSchemaV1 compatible schema with proper type validation + * + * @example + * ```typescript + * const usersTable = new Table({ + * name: column.text, + * age: column.integer + * }) + * + * const schema = convertTableToSchema(usersTable) + * // Now you can use this schema with powerSyncCollectionOptions + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "users", + * schema: schema + * }) + * ) + * ``` + */ +export function convertTableToSchema( + table: Table +): StandardSchemaV1> { + // Create validate function that checks types according to column definitions + const validate = ( + value: unknown + ): + | StandardSchemaV1.SuccessResult> + | StandardSchemaV1.FailureResult => { + if (typeof value != `object` || value == null) { + return { + issues: [ + { + message: `Value must be an object`, + }, + ], + } + } + + const issues: Array = [] + + // Check id field + if (!(`id` in value) || typeof (value as any).id != `string`) { + issues.push({ + message: `id field must be a string`, + path: [`id`], + }) + } + + // Check each column + for (const column of 
table.columns) { + const val = (value as ExtractedTable)[column.name] + + if (val == null) { + continue + } + + switch (column.type) { + case ColumnType.TEXT: + if (typeof val != `string`) { + issues.push({ + message: `${column.name} must be a string or null`, + path: [column.name], + }) + } + break + case ColumnType.INTEGER: + case ColumnType.REAL: + if (typeof val != `number`) { + issues.push({ + message: `${column.name} must be a number or null`, + path: [column.name], + }) + } + break + } + } + + if (issues.length > 0) { + return { issues } + } + + return { value: { ...value } as ExtractedTable } + } + + return { + "~standard": { + version: 1, + vendor: `powersync`, + validate, + types: { + input: {} as ExtractedTable, + output: {} as ExtractedTable, + }, + }, + } +} + +/** + * Converts an entire PowerSync Schema (containing multiple tables) into a collection of StandardSchemaV1 schemas. + * Each table in the schema is converted to its own StandardSchemaV1 schema while preserving all type information. 
+ * + * @template Tables - A record type mapping table names to their Table definitions + * @param schema - The PowerSync Schema containing multiple table definitions + * @returns An object where each key is a table name and each value is that table's StandardSchemaV1 schema + * + * @example + * ```typescript + * const mySchema = new Schema({ + * users: new Table({ + * name: column.text, + * age: column.integer + * }), + * posts: new Table({ + * title: column.text, + * views: column.integer + * }) + * }) + * + * const standardizedSchemas = convertPowerSyncSchemaToSpecs(mySchema) + * // Result has type: + * // { + * // users: StandardSchemaV1<{ name: string | null, age: number | null }>, + * // posts: StandardSchemaV1<{ title: string | null, views: number | null }> + * // } + * + * // Can be used with collections: + * const usersCollection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "users", + * schema: standardizedSchemas.users + * }) + * ) + * ``` + */ +export function convertPowerSyncSchemaToSpecs< + Tables extends Record>, +>( + schema: Schema +): { + [TableName in keyof Tables]: StandardSchemaV1< + ExtractedTable + > +} { + // Create a map to store the standardized schemas + const standardizedSchemas = {} as { + [TableName in keyof Tables]: StandardSchemaV1< + ExtractedTable + > + } + + // Iterate through each table in the schema + schema.tables.forEach((table) => { + // Convert each table to a StandardSchemaV1 and store it in the result map + ;(standardizedSchemas as any)[table.name] = convertTableToSchema(table) + }) + + return standardizedSchemas +} diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index e3c82cf26..9c9b95ae0 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -8,6 +8,7 @@ import { column, } from "@powersync/node" import {
createCollection, createTransaction, eq, @@ -16,6 +17,7 @@ import { import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" +import { convertPowerSyncSchemaToSpecs } from "../src/schema" import type { AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ @@ -43,6 +45,8 @@ describe(`PowerSync Integration`, () => { await db.disconnectAndClear() await db.close() }) + // Initial clear in case a test might have failed + await db.disconnectAndClear() return db } @@ -56,6 +60,44 @@ describe(`PowerSync Integration`, () => { `) } + describe(`schema`, () => { + it(`should accept a schema`, async () => { + const db = await createDatabase() + + // the collection should infer types and validate with the schema + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + schema: convertPowerSyncSchemaToSpecs(APP_SCHEMA).documents, + }) + ) + + collection.insert({ + id: randomUUID(), + name: `aname`, + }) + + collection.insert({ + id: randomUUID(), + name: null, + }) + + expect(collection.size).eq(2) + + // should validate inputs + try { + collection.insert({} as any) + console.log(`failed`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + expect(ex.message).contains(`id field must be a string`) + } + } + }) + }) + describe(`sync`, () => { it(`should initialize and fetch initial data`, async () => { const db = await createDatabase() @@ -341,7 +383,7 @@ describe(`PowerSync Integration`, () => { }) }) - describe(`General use`, async () => { + describe(`General use`, () => { it(`should rollback transactions on error`, async () => { const db = await createDatabase() diff --git a/packages/powersync-db-collection/tests/schema.test.ts b/packages/powersync-db-collection/tests/schema.test.ts new file mode 100644 index 
000000000..62c562b76 --- /dev/null +++ b/packages/powersync-db-collection/tests/schema.test.ts @@ -0,0 +1,198 @@ +import { Schema, Table, column } from "@powersync/common" +import { describe, expect, it } from "vitest" +import { + convertPowerSyncSchemaToSpecs, + convertTableToSchema, +} from "../src/schema" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +describe(`Schema Conversion`, () => { + describe(`convertTableToSchema`, () => { + it(`should convert a simple table with text and integer columns`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test schema structure + expect(schema).toHaveProperty(`~standard`) + expect(schema[`~standard`].version).toBe(1) + expect(schema[`~standard`].vendor).toBe(`powersync`) + expect(schema[`~standard`].validate).toBeTypeOf(`function`) + + // Test validation with valid data + const validResult = schema[`~standard`].validate({ + id: `123`, + name: `John`, + age: 25, + }) as StandardSchemaV1.SuccessResult + + expect(validResult.issues).toBeUndefined() + expect(validResult.value).toEqual({ + id: `123`, + name: `John`, + age: 25, + }) + + // Test validation with invalid data + const invalidResult = schema[`~standard`].validate({ + id: `123`, + name: 123, // wrong type + age: `25`, // wrong type + }) as StandardSchemaV1.FailureResult + + expect(invalidResult.issues).toHaveLength(2) + expect(invalidResult.issues[0]?.message).toContain(`must be a string`) + expect(invalidResult.issues[1]?.message).toContain(`must be a number`) + }) + + it(`should handle null values correctly`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test validation with null values + const result = schema[`~standard`].validate({ + id: `123`, + name: null, + age: null, + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + 
expect(result.value).toEqual({ + id: `123`, + name: null, + age: null, + }) + }) + + it(`should require id field`, () => { + const table = new Table({ + name: column.text, + }) + + const schema = convertTableToSchema(table) + + // Test validation without id + const result = schema[`~standard`].validate({ + name: `John`, + }) as StandardSchemaV1.FailureResult + + expect(result.issues).toHaveLength(1) + expect(result.issues[0]?.message).toContain(`id field must be a string`) + }) + + it(`should handle all column types`, () => { + const table = new Table({ + text_col: column.text, + int_col: column.integer, + real_col: column.real, + }) + + const schema = convertTableToSchema(table) + + // Test validation with all types + const result = schema[`~standard`].validate({ + id: `123`, + text_col: `text`, + int_col: 42, + real_col: 3.14, + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + text_col: `text`, + int_col: 42, + real_col: 3.14, + }) + }) + }) + + describe(`convertPowerSyncSchemaToSpecs`, () => { + it(`should convert multiple tables in a schema`, () => { + const schema = new Schema({ + users: new Table({ + name: column.text, + age: column.integer, + }), + posts: new Table({ + title: column.text, + views: column.integer, + }), + }) + + const result = convertPowerSyncSchemaToSpecs(schema) + + // Test structure + expect(result).toHaveProperty(`users`) + expect(result).toHaveProperty(`posts`) + + // Test users table schema + const userValidResult = result.users[`~standard`].validate({ + id: `123`, + name: `John`, + age: 25, + }) as StandardSchemaV1.SuccessResult + + expect(userValidResult.issues).toBeUndefined() + expect(userValidResult.value).toEqual({ + id: `123`, + name: `John`, + age: 25, + }) + + // Test posts table schema + const postValidResult = result.posts[`~standard`].validate({ + id: `456`, + title: `Hello`, + views: 100, + }) as StandardSchemaV1.SuccessResult + + 
expect(postValidResult.issues).toBeUndefined() + expect(postValidResult.value).toEqual({ + id: `456`, + title: `Hello`, + views: 100, + }) + }) + + it(`should handle empty schema`, () => { + const schema = new Schema({}) + const result = convertPowerSyncSchemaToSpecs(schema) + expect(result).toEqual({}) + }) + + it(`should validate each table independently`, () => { + const schema = new Schema({ + users: new Table({ + name: column.text, + }), + posts: new Table({ + views: column.integer, + }), + }) + + const result = convertPowerSyncSchemaToSpecs(schema) + + // Test that invalid data in one table doesn't affect the other + const userInvalidResult = result.users[`~standard`].validate({ + id: `123`, + name: 42, // wrong type + }) as StandardSchemaV1.FailureResult + + const postValidResult = result.posts[`~standard`].validate({ + id: `456`, + views: 100, + }) as StandardSchemaV1.SuccessResult + + expect(userInvalidResult.issues).toHaveLength(1) + expect(postValidResult.issues).toBeUndefined() + }) + }) +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index eade117ab..3661de7ff 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -652,9 +652,6 @@ importers: packages/powersync-db-collection: dependencies: - '@powersync/common': - specifier: ^1.39.0 - version: 1.39.0 '@standard-schema/spec': specifier: ^1.0.0 version: 1.0.0 @@ -674,6 +671,9 @@ importers: '@powersync/better-sqlite3': specifier: ^0.2.0 version: 0.2.0 + '@powersync/common': + specifier: ^1.39.0 + version: 1.39.0 '@powersync/node': specifier: ^0.11.0 version: 0.11.0(@powersync/common@1.39.0) From 860fa2679d3d49b8a2677bdfbfe4af2938ca65d9 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 18:00:20 +0200 Subject: [PATCH 12/25] ensure observers are ready before proceeding with mutations --- .../src/PowerSyncTransactor.ts | 19 ++++++++++++++++--- .../powersync-db-collection/src/powersync.ts | 3 ++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git 
a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index fade901c1..1c8b7445a 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -70,18 +70,31 @@ export class PowerSyncTransactor> { * The transaction might contain ops for different collections. * We can do some optimizations for single collection transactions. */ - const mutationsCollections = mutations.map( + const mutationsCollectionIds = mutations.map( (mutation) => mutation.collection.id ) - const collectionIds = Array.from(new Set(mutationsCollections)) + const collectionIds = Array.from(new Set(mutationsCollectionIds)) const lastCollectionMutationIndexes = new Map() + const allCollections = collectionIds + .map((id) => mutations.find((mutation) => mutation.collection.id == id)!) + .map((mutation) => mutation.collection) for (const collectionId of collectionIds) { lastCollectionMutationIndexes.set( collectionId, - mutationsCollections.lastIndexOf(collectionId) + mutationsCollectionIds.lastIndexOf(collectionId) ) } + // Check all the observers are ready before taking a lock + await Promise.all( + allCollections.map(async (collection) => { + if (collection.isReady()) { + return + } + await new Promise((resolve) => collection.onFirstReady(resolve)) + }) + ) + // Persist to PowerSync const { whenComplete } = await this.database.writeTransaction( async (tx) => { diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 694424077..a50115abf 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -133,7 +133,6 @@ export function powerSyncCollectionOptions< const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params - // Manually create a tracking operation for optimization purposes const 
abortController = new AbortController() @@ -237,6 +236,8 @@ export function powerSyncCollectionOptions< const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, getKey, + // Syncing should start immediately since we need to monitor the changes for mutations + startSync: true, sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation From ffa68d1d66b6bb4428ac3f9f869f22fd4f7bc946 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Fri, 3 Oct 2025 09:24:28 +0200 Subject: [PATCH 13/25] Add logging --- .../powersync-db-collection/src/powersync.ts | 90 +++++++++++-------- 1 file changed, 52 insertions(+), 38 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index a50115abf..fcd146669 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -128,55 +128,61 @@ export function powerSyncCollectionOptions< * "sync" * Notice that this describes the Sync between the local SQLite table * and the in-memory tanstack-db collection. - * It is not about sync between a client and a server! 
*/ const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params - // Manually create a tracking operation for optimization purposes const abortController = new AbortController() // The sync function needs to be synchronous async function start() { + database.logger.info(`Sync is starting`) database.onChangeWithCallback( { onChange: async () => { - await database.writeTransaction(async (context) => { - begin() - const operations = await context.getAll( - `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` - ) - const pendingOperations: Array = [] + await database + .writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] - for (const op of operations) { - const { id, operation, timestamp, value } = op - const parsedValue = { - id, - ...JSON.parse(value), + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? { id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + tableName, + }) } - const parsedPreviousValue = - op.operation == DiffTriggerOperation.UPDATE - ? 
{ id, ...JSON.parse(op.previous_value) } - : null - write({ - type: mapOperation(operation), - value: parsedValue, - previousValue: parsedPreviousValue, - }) - pendingOperations.push({ - id, - operation, - timestamp, - tableName, - }) - } - // clear the current operations - await context.execute(`DELETE FROM ${trackedTableName}`) + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) - commit() - pendingOperationStore.resolvePendingFor(pendingOperations) - }) + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + .catch((error) => { + database.logger.error( + `An error has been detected in the sync handler`, + error + ) + }) }, }, { @@ -207,6 +213,7 @@ export function powerSyncCollectionOptions< } commit() markReady() + database.logger.info(`Sync is ready`) }, }, }) @@ -215,15 +222,22 @@ export function powerSyncCollectionOptions< if (abortController.signal.aborted) { await disposeTracking() } else { - abortController.signal.addEventListener(`abort`, () => { - disposeTracking() - }) + abortController.signal.addEventListener( + `abort`, + () => { + disposeTracking() + }, + { once: true } + ) } } - start() + start().catch((error) => + database.logger.error(`Could not start syncing process`, error) + ) return () => { + database.logger.info(`Sync has been stopped`) abortController.abort() } }, From 79abf05564129ab1c3743c6d47030621a7689ac6 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Fri, 3 Oct 2025 09:37:29 +0200 Subject: [PATCH 14/25] Implement batching during initial sync --- .../src/definitions.ts | 18 +++++++ .../powersync-db-collection/src/powersync.ts | 50 ++++++++++++------- 2 files changed, 50 insertions(+), 18 deletions(-) diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 7af17a065..69f3b243f 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ 
-46,6 +46,19 @@ export type PowerSyncCollectionConfig< tableName: string /** The PowerSync database instance */ database: AbstractPowerSyncDatabase + /** + * The maximum number of documents to read from the SQLite table + * in a single batch during the initial sync between PowerSync and the + * in-memory TanStack DB collection. + * + * @remarks + * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified. + * - Larger values reduce the number of round trips to the storage + * engine but increase memory usage per batch. + * - Smaller values may lower memory usage and allow earlier + * streaming of initial results, at the cost of more query calls. + */ + syncBatchSize?: number } export type PowerSyncCollectionMeta = { @@ -71,3 +84,8 @@ export type EnhancedPowerSyncCollectionConfig< export type PowerSyncCollectionUtils = { getMeta: () => PowerSyncCollectionMeta } + +/** + * Default value for {@link PowerSyncCollectionConfig#syncBatchSize} + */ +export const DEFAULT_BATCH_SIZE = 1000 diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index fcd146669..86a69d07c 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -1,20 +1,21 @@ -import { DiffTriggerOperation } from "@powersync/common" +import { DiffTriggerOperation, sanitizeSQL } from "@powersync/common" +import { DEFAULT_BATCH_SIZE } from "./definitions" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" -import type { TriggerDiffRecord } from "@powersync/common" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" import type { 
PendingOperation } from "./PendingOperationStore" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -100,7 +101,12 @@ export function powerSyncCollectionOptions< >( config: PowerSyncCollectionConfig ): EnhancedPowerSyncCollectionConfig { - const { database, tableName, ...restConfig } = config + const { + database, + tableName, + syncBatchSize = DEFAULT_BATCH_SIZE, + ...restConfig + } = config /** * The onInsert, onUpdate, onDelete handlers should only return @@ -202,16 +208,24 @@ export function powerSyncCollectionOptions< }, hooks: { beforeCreate: async (context) => { - begin() - for (const row of await context.getAll( - `SELECT * FROM ${tableName}` - )) { - write({ - type: `insert`, - value: row, - }) + let currentBatchCount = syncBatchSize + let cursor = 0 + while (currentBatchCount == syncBatchSize) { + begin() + const batchItems = await context.getAll( + sanitizeSQL`SELECT * FROM ${tableName} LIMIT ? OFFSET ?`, + [syncBatchSize, cursor] + ) + currentBatchCount = batchItems.length + cursor += currentBatchCount + for (const row of batchItems) { + write({ + type: `insert`, + value: row, + }) + } + commit() } - commit() markReady() database.logger.info(`Sync is ready`) }, From 237ed35cd277adde349e3737859e8f1272fd92d6 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Fri, 3 Oct 2025 10:52:52 +0200 Subject: [PATCH 15/25] Update log messages. Avoid requirement for NPM install scripts. 
--- packages/powersync-db-collection/package.json | 5 +- .../powersync-db-collection/src/powersync.ts | 31 +++++++---- .../tests/powersync.test.ts | 1 + pnpm-lock.yaml | 51 ++++++------------- 4 files changed, 39 insertions(+), 49 deletions(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 5588491e4..9e1e0213e 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -13,9 +13,8 @@ "@powersync/common": "^1.39.0" }, "devDependencies": { - "@powersync/common": "^1.39.0", - "@powersync/better-sqlite3": "^0.2.0", - "@powersync/node": "^0.11.0", + "@powersync/common": "0.0.0-dev-20251003085035", + "@powersync/node": "0.0.0-dev-20251003085035", "@types/debug": "^4.1.12", "@vitest/coverage-istanbul": "^3.2.4" }, diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 86a69d07c..ed24ba369 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -3,19 +3,19 @@ import { DEFAULT_BATCH_SIZE } from "./definitions" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" +import type { TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync 
collection options for use with a standard Collection @@ -142,7 +142,9 @@ export function powerSyncCollectionOptions< // The sync function needs to be synchronous async function start() { - database.logger.info(`Sync is starting`) + database.logger.info( + `Sync is starting for ${tableName} into ${trackedTableName}` + ) database.onChangeWithCallback( { onChange: async () => { @@ -227,7 +229,9 @@ export function powerSyncCollectionOptions< commit() } markReady() - database.logger.info(`Sync is ready`) + database.logger.info( + `Sync is ready for ${tableName} into ${trackedTableName}` + ) }, }, }) @@ -247,11 +251,16 @@ export function powerSyncCollectionOptions< } start().catch((error) => - database.logger.error(`Could not start syncing process`, error) + database.logger.error( + `Could not start syncing process for ${tableName} into ${trackedTableName}`, + error + ) ) return () => { - database.logger.info(`Sync has been stopped`) + database.logger.info( + `Sync has been stopped for ${tableName} into ${trackedTableName}` + ) abortController.abort() } }, diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 9c9b95ae0..78dcca83b 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -38,6 +38,7 @@ describe(`PowerSync Integration`, () => { database: { dbFilename: `test.sqlite`, dbLocation: tmpdir(), + implementation: { type: `node:sqlite` }, }, schema: APP_SCHEMA, }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3661de7ff..c0815337d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -668,15 +668,12 @@ importers: specifier: ^4.0.1 version: 4.0.1 devDependencies: - '@powersync/better-sqlite3': - specifier: ^0.2.0 - version: 0.2.0 '@powersync/common': - specifier: ^1.39.0 - version: 1.39.0 + specifier: 0.0.0-dev-20251003085035 + version: 0.0.0-dev-20251003085035 '@powersync/node': - specifier: ^0.11.0 - 
version: 0.11.0(@powersync/common@1.39.0) + specifier: 0.0.0-dev-20251003085035 + version: 0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035) '@types/debug': specifier: ^4.1.12 version: 4.1.12 @@ -2860,16 +2857,17 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@powersync/better-sqlite3@0.2.0': - resolution: {integrity: sha512-8otwueqHJqwilUz/vLENlpMp2c4k/TV6hGX016XrZxSkizDAil99yRm7lAVwpbYYGuSgyzidyDh6vy6PY+m4kw==} + '@powersync/common@0.0.0-dev-20251003085035': + resolution: {integrity: sha512-k69aY8onIM4eXvj/obFkCadGmKgqMKSgk90Sih8lKF9BrGPGpQU/MtB6673LmhavURQnaS340FpBsL/4p/gk0g==} - '@powersync/common@1.39.0': - resolution: {integrity: sha512-qGPl/LPRoopNWjduGXfN+P3PsdTMfFR9YI2TbsLA++VRMK+10To9ey3Z6yprKoVbdLmisPde9mAaTvb1ugkeyg==} - - '@powersync/node@0.11.0': - resolution: {integrity: sha512-33J3/TnZ+s9mu0pHFfJCZhSQp7C+Ai4/1sBxC7aNdiRCyvg8DBYY8P7gMYXlyZfyMQHc0hfs3GDJzxYOBMNAyQ==} + '@powersync/node@0.0.0-dev-20251003085035': + resolution: {integrity: sha512-PbmpmiaHxWNDLQw4bNBakezQwPXCxqeef8E6uzxUH+baYmsIfXx5OY+lI6XtdQ+PbLjY4hFtxHnDb2qpSzJZPg==} peerDependencies: - '@powersync/common': ^1.39.0 + '@powersync/common': 0.0.0-dev-20251003085035 + better-sqlite3: 12.x + peerDependenciesMeta: + better-sqlite3: + optional: true '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -4335,9 +4333,6 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - bindings@1.5.0: - resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} - body-parser@1.20.3: resolution: {integrity: 
sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} @@ -5322,9 +5317,6 @@ packages: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} - file-uri-to-path@1.0.0: - resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} - fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} @@ -10650,18 +10642,13 @@ snapshots: '@pkgr/core@0.2.9': {} - '@powersync/better-sqlite3@0.2.0': - dependencies: - bindings: 1.5.0 - - '@powersync/common@1.39.0': + '@powersync/common@0.0.0-dev-20251003085035': dependencies: js-logger: 1.6.1 - '@powersync/node@0.11.0(@powersync/common@1.39.0)': + '@powersync/node@0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035)': dependencies: - '@powersync/better-sqlite3': 0.2.0 - '@powersync/common': 1.39.0 + '@powersync/common': 0.0.0-dev-20251003085035 async-lock: 1.4.1 bson: 6.10.4 comlink: 4.4.2 @@ -12514,10 +12501,6 @@ snapshots: binary-extensions@2.3.0: {} - bindings@1.5.0: - dependencies: - file-uri-to-path: 1.0.0 - body-parser@1.20.3: dependencies: bytes: 3.1.2 @@ -13682,8 +13665,6 @@ snapshots: dependencies: flat-cache: 4.0.1 - file-uri-to-path@1.0.0: {} - fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 From 8d489e9556182120c97f2742001c066fd0c54210 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 21 Oct 2025 13:59:39 +0200 Subject: [PATCH 16/25] Schemas Step 1: Infer types from PowerSync schema table. 
--- docs/collections/powersync-collection.md | 10 +- packages/powersync-db-collection/package.json | 6 +- .../src/definitions.ts | 23 ++- .../powersync-db-collection/src/helpers.ts | 22 +++ .../powersync-db-collection/src/powersync.ts | 87 ++++++----- .../powersync-db-collection/src/schema.ts | 29 +--- .../tests/powersync.test.ts | 138 ++++++------------ pnpm-lock.yaml | 24 +-- 8 files changed, 158 insertions(+), 181 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 0274a838c..4b070eb8a 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -85,22 +85,26 @@ db.connect(new Connector()) There are two ways to create a collection: using type inference or using schema validation. -#### Option 1: Using Type Inference +#### Option 1: Using Table Type Inference + +The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection data and operations. 
```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" const documentsCollection = createCollection( - powerSyncCollectionOptions({ + powerSyncCollectionOptions({ database: db, - tableName: "documents", + table: APP_SCHEMA.props.documents, }) ) ``` #### Option 2: Using Schema Validation +TODO + ```ts import { createCollection } from "@tanstack/react-db" import { diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 9e1e0213e..f5af86b27 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -10,11 +10,11 @@ "p-defer": "^4.0.1" }, "peerDependencies": { - "@powersync/common": "^1.39.0" + "@powersync/common": "0.0.0-dev-20251021113138" }, "devDependencies": { - "@powersync/common": "0.0.0-dev-20251003085035", - "@powersync/node": "0.0.0-dev-20251003085035", + "@powersync/common": "0.0.0-dev-20251021113138", + "@powersync/node": "0.0.0-dev-20251021113138", "@types/debug": "^4.1.12", "@vitest/coverage-istanbul": "^3.2.4" }, diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 69f3b243f..dec66a38d 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -1,4 +1,9 @@ -import type { AbstractPowerSyncDatabase } from "@powersync/common" +import type { ExtractedTable } from "./helpers" +import type { + AbstractPowerSyncDatabase, + ColumnsType, + Table, +} from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" @@ -36,14 +41,14 @@ import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" * ``` */ export type PowerSyncCollectionConfig< - T extends object = Record, + TableType extends Table = Table, TSchema extends 
StandardSchemaV1 = never, > = Omit< - BaseCollectionConfig, + BaseCollectionConfig, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` > & { - /** The name of the table in PowerSync database */ - tableName: string + /** The PowerSync Schema Table definition */ + table: TableType /** The PowerSync database instance */ database: AbstractPowerSyncDatabase /** @@ -73,9 +78,13 @@ export type PowerSyncCollectionMeta = { } export type EnhancedPowerSyncCollectionConfig< - T extends object = Record, + TableType extends Table = Table, TSchema extends StandardSchemaV1 = never, -> = CollectionConfig & { +> = CollectionConfig< + ExtractedTable, + string, + TSchema +> & { id?: string utils: PowerSyncCollectionUtils schema?: TSchema diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 13ace1fb6..6cd309990 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -1,4 +1,5 @@ import { DiffTriggerOperation } from "@powersync/common" +import type { ColumnsType, ExtractColumnValueType } from "@powersync/common" /** * All PowerSync table records have a uuid `id` column. @@ -8,6 +9,27 @@ export type PowerSyncRecord = { [key: string]: unknown } +/** + * Utility type that extracts the typed structure of a table based on its column definitions. + * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. 
+ * + * @template Columns - The ColumnsType definition containing column configurations + * @example + * ```typescript + * const table = new Table({ + * name: column.text, + * age: column.integer + * }) + * type TableType = ExtractedTable + * // Results in: { name: string | null, age: number | null } + * ``` + */ +export type ExtractedTable = { + [K in keyof Columns]: ExtractColumnValueType +} & { + id: string +} + export function asPowerSyncRecord(record: any): PowerSyncRecord { if (typeof record.id !== `string`) { throw new Error(`Record must have a string id field`) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index ed24ba369..049647d5a 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -3,19 +3,17 @@ import { DEFAULT_BATCH_SIZE } from "./definitions" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" -import type { TriggerDiffRecord } from "@powersync/common" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" +import { convertTableToSchema } from "./schema" +import type { ExtractedTable } from "./helpers" +import type { PendingOperation } from "./PendingOperationStore" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" -import type { PendingOperation } from "./PendingOperationStore" +import type { CollectionConfig, SyncConfig } from "@tanstack/db" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { ColumnsType, Table, TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -42,18 +40,19 @@ import type { PendingOperation } 
from "./PendingOperationStore" * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", - * schema: APP_SCHEMA, + * table: APP_SCHEMA.props.documents, + * schema: TODO * }) * ) * ``` */ -export function powerSyncCollectionOptions( - config: PowerSyncCollectionConfig, T> -): CollectionConfig, string, T> & { - schema: T - utils: PowerSyncCollectionUtils -} +// TODO!!! +// export function powerSyncCollectionOptions( +// config: PowerSyncCollectionConfig, T> +// ): CollectionConfig, string, T> & { +// schema: T +// utils: PowerSyncCollectionUtils +// } /** * Creates a PowerSync collection configuration without schema validation. @@ -76,18 +75,20 @@ export function powerSyncCollectionOptions( * }) * * const collection = createCollection( - * powerSyncCollectionOptions({ + * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", + * table: APP_SCHEMA.props.documents * }) * ) * ``` */ -export function powerSyncCollectionOptions( - config: PowerSyncCollectionConfig & { +export function powerSyncCollectionOptions< + TableType extends Table = Table, +>( + config: PowerSyncCollectionConfig & { schema?: never } -): CollectionConfig & { +): CollectionConfig, string> & { schema?: never utils: PowerSyncCollectionUtils } @@ -96,18 +97,24 @@ export function powerSyncCollectionOptions( * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. 
 */ export function powerSyncCollectionOptions< - T extends object = Record, + TableType extends Table = Table, TSchema extends StandardSchemaV1 = never, >( - config: PowerSyncCollectionConfig -): EnhancedPowerSyncCollectionConfig { + config: PowerSyncCollectionConfig +): EnhancedPowerSyncCollectionConfig { const { database, - tableName, + table, + schema: inputSchema, syncBatchSize = DEFAULT_BATCH_SIZE, ...restConfig } = config + type RecordType = ExtractedTable + const { viewName } = table + + // We can do basic runtime validations for columns if no explicit schema has been provided + const schema = inputSchema ?? (convertTableToSchema(table) as TSchema) /** * The onInsert, onUpdate, onDelete handlers should only return * after we have written the changes to Tanstack DB. @@ -120,13 +127,13 @@ export function powerSyncCollectionOptions< */ const pendingOperationStore = PendingOperationStore.GLOBAL // Keep the tracked table unique in case of multiple tabs. - const trackedTableName = `__${tableName}_tracking_${Math.floor( + const trackedTableName = `__${viewName}_tracking_${Math.floor( Math.random() * 0xffffffff ) .toString(16) .padStart(8, `0`)}` - const transactor = new PowerSyncTransactor({ + const transactor = new PowerSyncTransactor({ database, }) @@ -135,7 +142,7 @@ export function powerSyncCollectionOptions< * Notice that this describes the Sync between the local SQLite table * and the in-memory tanstack-db collection.
*/ - const sync: SyncConfig = { + const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params const abortController = new AbortController() @@ -143,7 +150,7 @@ export function powerSyncCollectionOptions< // The sync function needs to be synchronous async function start() { database.logger.info( - `Sync is starting for ${tableName} into ${trackedTableName}` + `Sync is starting for ${viewName} into ${trackedTableName}` ) database.onChangeWithCallback( { @@ -175,7 +182,7 @@ export function powerSyncCollectionOptions< id, operation, timestamp, - tableName, + tableName: viewName, }) } @@ -201,7 +208,7 @@ export function powerSyncCollectionOptions< ) const disposeTracking = await database.triggers.createDiffTrigger({ - source: tableName, + source: viewName, destination: trackedTableName, when: { [DiffTriggerOperation.INSERT]: `TRUE`, @@ -214,8 +221,8 @@ export function powerSyncCollectionOptions< let cursor = 0 while (currentBatchCount == syncBatchSize) { begin() - const batchItems = await context.getAll( - sanitizeSQL`SELECT * FROM ${tableName} LIMIT ? OFFSET ?`, + const batchItems = await context.getAll( + sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? 
OFFSET ?`, [syncBatchSize, cursor] ) currentBatchCount = batchItems.length @@ -230,7 +237,7 @@ export function powerSyncCollectionOptions< } markReady() database.logger.info( - `Sync is ready for ${tableName} into ${trackedTableName}` + `Sync is ready for ${viewName} into ${trackedTableName}` ) }, }, @@ -252,14 +259,14 @@ export function powerSyncCollectionOptions< start().catch((error) => database.logger.error( - `Could not start syncing process for ${tableName} into ${trackedTableName}`, + `Could not start syncing process for ${viewName} into ${trackedTableName}`, error ) ) return () => { database.logger.info( - `Sync has been stopped for ${tableName} into ${trackedTableName}` + `Sync has been stopped for ${viewName} into ${trackedTableName}` ) abortController.abort() } @@ -268,16 +275,18 @@ export function powerSyncCollectionOptions< getSyncMetadata: undefined, } - const getKey = (record: T) => asPowerSyncRecord(record).id + const getKey = (record: RecordType) => asPowerSyncRecord(record).id - const outputConfig: EnhancedPowerSyncCollectionConfig = { + const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, + schema, getKey, // Syncing should start immediately since we need to monitor the changes for mutations startSync: true, sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation + params.transaction return await transactor.applyTransaction(params.transaction) }, onUpdate: async (params) => { @@ -290,7 +299,7 @@ export function powerSyncCollectionOptions< }, utils: { getMeta: () => ({ - tableName, + tableName: viewName, trackedTableName, }), }, diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts index dc894bf58..008a88424 100644 --- a/packages/powersync-db-collection/src/schema.ts +++ b/packages/powersync-db-collection/src/schema.ts @@ -1,32 +1,7 @@ import { ColumnType } from "@powersync/common" -import type { - ColumnsType, - 
ExtractColumnValueType, - Schema, - Table, -} from "@powersync/common" +import type { ColumnsType, Schema, Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" - -/** - * Utility type that extracts the typed structure of a table based on its column definitions. - * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. - * - * @template Columns - The ColumnsType definition containing column configurations - * @example - * ```typescript - * const table = new Table({ - * name: column.text, - * age: column.integer - * }) - * type TableType = ExtractedTable - * // Results in: { name: string | null, age: number | null } - * ``` - */ -type ExtractedTable = { - [K in keyof Columns]: ExtractColumnValueType -} & { - id: string -} +import type { ExtractedTable } from "./helpers" /** * Converts a PowerSync Table instance to a StandardSchemaV1 schema. diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 78dcca83b..7e182a92c 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -17,21 +17,20 @@ import { import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" -import { convertPowerSyncSchemaToSpecs } from "../src/schema" import type { AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ users: new Table({ name: column.text, }), - documents: new Table({ - name: column.text, - }), + documents: new Table( + { + name: column.text, + }, + { viewName: `documents` } + ), }) -type Document = (typeof APP_SCHEMA)[`types`][`documents`] -type User = (typeof APP_SCHEMA)[`types`][`users`] - describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ @@ -51,6 
+50,18 @@ describe(`PowerSync Integration`, () => { return db } + function createDocumentsCollection(db: PowerSyncDatabase) { + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + // We get typing and a default validator from this + table: APP_SCHEMA.props.documents, + }) + ) + onTestFinished(() => collection.cleanup()) + return collection + } + async function createTestData(db: AbstractPowerSyncDatabase) { await db.execute(` INSERT into documents (id, name) @@ -62,17 +73,11 @@ describe(`PowerSync Integration`, () => { } describe(`schema`, () => { - it(`should accept a schema`, async () => { + it(`should use basic runtime validations from automatic schema`, async () => { const db = await createDatabase() // the collection should infer types and validate with the schema - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - schema: convertPowerSyncSchemaToSpecs(APP_SCHEMA).documents, - }) - ) + const collection = createDocumentsCollection(db) collection.insert({ id: randomUUID(), @@ -103,14 +108,7 @@ describe(`PowerSync Integration`, () => { it(`should initialize and fetch initial data`, async () => { const db = await createDatabase() await createTestData(db) - - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = createDocumentsCollection(db) await collection.stateWhenReady() @@ -127,13 +125,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = createDocumentsCollection(db) await collection.stateWhenReady() @@ -203,14 +195,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await 
createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) - + const collection = createDocumentsCollection(db) await collection.stateWhenReady() // Verify the collection state contains our items @@ -263,14 +248,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) - + const collection = createDocumentsCollection(db) await collection.stateWhenReady() expect(collection.size).toBe(3) @@ -319,18 +297,12 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const documentsCollection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => documentsCollection.cleanup()) + const documentsCollection = createDocumentsCollection(db) const usersCollection = createCollection( - powerSyncCollectionOptions({ + powerSyncCollectionOptions({ database: db, - tableName: `users`, + table: APP_SCHEMA.props.users, }) ) onTestFinished(() => usersCollection.cleanup()) @@ -388,15 +360,20 @@ describe(`PowerSync Integration`, () => { it(`should rollback transactions on error`, async () => { const db = await createDatabase() + const options = powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }) + + // This will cause the transactor to fail when writing to SQLite + vi.spyOn(options.utils, `getMeta`).mockImplementation(() => ({ + tableName: `fakeTable`, + trackedTableName: `error`, + })) // Create two collections for the same table - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const 
collection = createCollection(options) + onTestFinished(() => collection.cleanup()) const addTx = createTransaction({ autoCommit: false, mutationFn: async ({ transaction }) => { @@ -407,19 +384,18 @@ describe(`PowerSync Integration`, () => { }) expect(collection.size).eq(0) + await collection.stateWhenReady() + const id = randomUUID() - // Attempt to insert invalid data - // We can only do this since we aren't using schema validation here addTx.mutate(() => { collection.insert({ id, - name: new Error() as unknown as string, // This will cause a SQL error eventually + name: `aname`, }) }) - // This should be present in the optimisic state, but should be reverted when attempting to persist + // This should be present in the optimistic state, but should be reverted when attempting to persist expect(collection.size).eq(1) - expect((collection.get(id)?.name as any) instanceof Error).true try { await addTx.commit() @@ -436,13 +412,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() // Create two collections for the same table - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = createDocumentsCollection(db) await collection.stateWhenReady() @@ -493,22 +463,10 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() // Create two collections for the same table - const collectionA = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collectionA.cleanup()) + const collectionA = createDocumentsCollection(db) await collectionA.stateWhenReady() - const collectionB = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collectionB.cleanup()) + const collectionB = createDocumentsCollection(db) await collectionB.stateWhenReady() await createTestData(db) @@ 
-527,9 +485,9 @@ describe(`PowerSync Integration`, () => { describe(`Lifecycle`, async () => { it(`should cleanup resources`, async () => { const db = await createDatabase() - const collectionOptions = powerSyncCollectionOptions({ + const collectionOptions = powerSyncCollectionOptions({ database: db, - tableName: `documents`, + table: APP_SCHEMA.props.documents, }) const meta = collectionOptions.utils.getMeta() diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c0815337d..ae36530b8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -669,11 +669,11 @@ importers: version: 4.0.1 devDependencies: '@powersync/common': - specifier: 0.0.0-dev-20251003085035 - version: 0.0.0-dev-20251003085035 + specifier: 0.0.0-dev-20251021113138 + version: 0.0.0-dev-20251021113138 '@powersync/node': - specifier: 0.0.0-dev-20251003085035 - version: 0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035) + specifier: 0.0.0-dev-20251021113138 + version: 0.0.0-dev-20251021113138(@powersync/common@0.0.0-dev-20251021113138) '@types/debug': specifier: ^4.1.12 version: 4.1.12 @@ -2857,13 +2857,13 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@powersync/common@0.0.0-dev-20251003085035': - resolution: {integrity: sha512-k69aY8onIM4eXvj/obFkCadGmKgqMKSgk90Sih8lKF9BrGPGpQU/MtB6673LmhavURQnaS340FpBsL/4p/gk0g==} + '@powersync/common@0.0.0-dev-20251021113138': + resolution: {integrity: sha512-+E9+4p5xgzdIPr38EayS83+J04G9E89cVUMTCihgJ+P1VqZljuTBj/fnqshuALfcPofOpTryobsO+7CeWNuyEw==} - '@powersync/node@0.0.0-dev-20251003085035': - resolution: {integrity: sha512-PbmpmiaHxWNDLQw4bNBakezQwPXCxqeef8E6uzxUH+baYmsIfXx5OY+lI6XtdQ+PbLjY4hFtxHnDb2qpSzJZPg==} + '@powersync/node@0.0.0-dev-20251021113138': + resolution: {integrity: sha512-1fvtRWQdxgWMjmnOLXE1PbM6JqmxDn5xy4eI46VzSEOMjgkkah0Vn6C9mjPrs7gtlkuDMKRqPQm024Fl3CmPZQ==} peerDependencies: - 
'@powersync/common': 0.0.0-dev-20251003085035 + '@powersync/common': 0.0.0-dev-20251021113138 better-sqlite3: 12.x peerDependenciesMeta: better-sqlite3: @@ -10642,13 +10642,13 @@ snapshots: '@pkgr/core@0.2.9': {} - '@powersync/common@0.0.0-dev-20251003085035': + '@powersync/common@0.0.0-dev-20251021113138': dependencies: js-logger: 1.6.1 - '@powersync/node@0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035)': + '@powersync/node@0.0.0-dev-20251021113138(@powersync/common@0.0.0-dev-20251021113138)': dependencies: - '@powersync/common': 0.0.0-dev-20251003085035 + '@powersync/common': 0.0.0-dev-20251021113138 async-lock: 1.4.1 bson: 6.10.4 comlink: 4.4.2 From 4692c8b2d0ed4f9199337c9e16a5cd150ac74fbb Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 21 Oct 2025 17:57:19 +0200 Subject: [PATCH 17/25] Support input schema validations with Zod --- docs/collections/powersync-collection.md | 16 ++-- .../src/definitions.ts | 22 ++--- .../powersync-db-collection/src/helpers.ts | 10 ++- .../powersync-db-collection/src/powersync.ts | 85 ++++++++++--------- .../tests/powersync.test.ts | 53 ++++++++++++ 5 files changed, 121 insertions(+), 65 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 4b070eb8a..e7893a040 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -101,9 +101,9 @@ const documentsCollection = createCollection( ) ``` -#### Option 2: Using Schema Validation +#### Option 2: Using Advanced Schema Validation -TODO +Additional validations can be performed by supplying a Standard Schema. 
```ts import { createCollection } from "@tanstack/react-db" @@ -111,15 +111,19 @@ import { powerSyncCollectionOptions, convertPowerSyncSchemaToSpecs, } from "@tanstack/powersync-db-collection" +import { z } from "zod" -// Convert PowerSync schema to TanStack DB schema -const schemas = convertPowerSyncSchemaToSpecs(APP_SCHEMA) +// The output of this schema must correspond to the SQLite schema +const schema = z.object({ + id: z.string(), + name: z.string().min(3, { message: errorMessage }).nullable(), +}) const documentsCollection = createCollection( powerSyncCollectionOptions({ database: db, - tableName: "documents", - schema: schemas.documents, // Use schema for runtime type validation + table: APP_SCHEMA.props.documents, + schema, }) ) ``` diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index dec66a38d..9f8e66c14 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -1,11 +1,7 @@ -import type { ExtractedTable } from "./helpers" -import type { - AbstractPowerSyncDatabase, - ColumnsType, - Table, -} from "@powersync/common" +import type { AbstractPowerSyncDatabase, Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" +import type { ExtractedTable } from "./helpers" /** * Configuration interface for PowerSync collection options @@ -41,14 +37,14 @@ import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" * ``` */ export type PowerSyncCollectionConfig< - TableType extends Table = Table, + TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, > = Omit< - BaseCollectionConfig, string, TSchema>, + BaseCollectionConfig, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` > & { /** The PowerSync Schema Table definition */ - table: TableType + table: TTable /** The 
PowerSync database instance */ database: AbstractPowerSyncDatabase /** @@ -78,13 +74,9 @@ export type PowerSyncCollectionMeta = { } export type EnhancedPowerSyncCollectionConfig< - TableType extends Table = Table, + TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, -> = CollectionConfig< - ExtractedTable, - string, - TSchema -> & { +> = CollectionConfig, string, TSchema> & { id?: string utils: PowerSyncCollectionUtils schema?: TSchema diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 6cd309990..76313caed 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -1,5 +1,5 @@ import { DiffTriggerOperation } from "@powersync/common" -import type { ColumnsType, ExtractColumnValueType } from "@powersync/common" +import type { ExtractColumnValueType, Table } from "@powersync/common" /** * All PowerSync table records have a uuid `id` column. @@ -20,12 +20,14 @@ export type PowerSyncRecord = { * name: column.text, * age: column.integer * }) - * type TableType = ExtractedTable + * type TableType = ExtractedTable * // Results in: { name: string | null, age: number | null } * ``` */ -export type ExtractedTable = { - [K in keyof Columns]: ExtractColumnValueType +export type ExtractedTable = { + [K in keyof TTable[`columnMap`]]: ExtractColumnValueType< + TTable[`columnMap`][K] + > } & { id: string } diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 049647d5a..cba14eda6 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -4,104 +4,110 @@ import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" import { convertTableToSchema } from "./schema" -import type { ExtractedTable 
} from "./helpers" -import type { PendingOperation } from "./PendingOperationStore" +import type { Table, TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { CollectionConfig, SyncConfig } from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" -import type { CollectionConfig, SyncConfig } from "@tanstack/db" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { ColumnsType, Table, TriggerDiffRecord } from "@powersync/common" +import type { ExtractedTable } from "./helpers" +import type { PendingOperation } from "./PendingOperationStore" /** * Creates PowerSync collection options for use with a standard Collection * - * @template TExplicit - The explicit type of items in the collection (highest priority) - * @template TSchema - The schema type for validation and type inference (second priority) + * @template TTable - The SQLite based typing + * @template TSchema - The schema type for validation - optionally supports a custom input type * @param config - Configuration options for the PowerSync collection * @returns Collection options with utilities */ -// Overload for when schema is provided /** - * Creates a PowerSync collection configuration with schema validation. + * Creates a PowerSync collection configuration with basic default validation. 
* * @example * ```typescript - * // With schema validation * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, * }), * }) * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * table: APP_SCHEMA.props.documents, - * schema: TODO + * table: APP_SCHEMA.props.documents * }) * ) * ``` */ -// TODO!!! -// export function powerSyncCollectionOptions( -// config: PowerSyncCollectionConfig, T> -// ): CollectionConfig, string, T> & { -// schema: T -// utils: PowerSyncCollectionUtils -// } +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig +): CollectionConfig, string, never> & { + utils: PowerSyncCollectionUtils +} +// Overload for when schema is provided /** - * Creates a PowerSync collection configuration without schema validation. + * Creates a PowerSync collection configuration with schema validation. * * @example * ```typescript + * import { z } from "zod" + * + * // The PowerSync SQLite Schema * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, * }), * }) * - * type Document = (typeof APP_SCHEMA)["types"]["documents"] - * - * const db = new PowerSyncDatabase({ - * database: { - * dbFilename: "test.sqlite", - * }, - * schema: APP_SCHEMA, + * // Advanced Zod validations. 
The output type of this schema + * // is constrained to the SQLite schema of APP_SCHEMA + * const schema = z.object({ + * id: z.string(), + * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), * }) * * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * table: APP_SCHEMA.props.documents + * table: APP_SCHEMA.props.documents, + * schema * }) * ) * ``` */ export function powerSyncCollectionOptions< - TableType extends Table = Table, + TTable extends Table, + TSchema extends StandardSchemaV1, any>, >( - config: PowerSyncCollectionConfig & { - schema?: never - } -): CollectionConfig, string> & { - schema?: never + config: PowerSyncCollectionConfig +): CollectionConfig, string, TSchema> & { utils: PowerSyncCollectionUtils + schema: TSchema } /** * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. */ export function powerSyncCollectionOptions< - TableType extends Table = Table, + TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, >( - config: PowerSyncCollectionConfig -): EnhancedPowerSyncCollectionConfig { + config: PowerSyncCollectionConfig +): EnhancedPowerSyncCollectionConfig { const { database, table, @@ -110,7 +116,7 @@ export function powerSyncCollectionOptions< ...restConfig } = config - type RecordType = ExtractedTable + type RecordType = ExtractedTable const { viewName } = table // We can do basic runtime validations for columns if not explicit schema has been provided @@ -277,7 +283,7 @@ export function powerSyncCollectionOptions< const getKey = (record: RecordType) => asPowerSyncRecord(record).id - const outputConfig: EnhancedPowerSyncCollectionConfig = { + const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, schema, getKey, @@ -286,7 +292,6 @@ export function powerSyncCollectionOptions< sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation - 
params.transaction return await transactor.applyTransaction(params.transaction) }, onUpdate: async (params) => { diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 7e182a92c..81f973255 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -15,6 +15,7 @@ import { liveQueryCollectionOptions, } from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" +import { z } from "zod" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" import type { AbstractPowerSyncDatabase } from "@powersync/node" @@ -102,6 +103,58 @@ describe(`PowerSync Integration`, () => { } } }) + + it(`should allow for advanced validations`, async () => { + const db = await createDatabase() + + const errorMessage = `Name must be at least 3 characters` + const schema = z.object({ + id: z.string(), + name: z.string().min(3, { message: errorMessage }).nullable(), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) + ) + onTestFinished(() => collection.cleanup()) + + try { + collection.insert({ + id: randomUUID(), + name: `2`, + }) + expect.fail(`Should throw a validation error`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + console.log(ex) + expect(ex.message).contains(errorMessage) + } + } + + collection.insert({ + id: randomUUID(), + name: null, + }) + + expect(collection.size).eq(1) + + // should validate inputs + try { + collection.insert({} as any) + console.log(`failed`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + console.log(ex) + expect(ex.message).contains(`Required - path: id`) + } + } + }) }) describe(`sync`, () 
=> { From fb45f0271b966a8a3fa77707d552f162e845ef8b Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 21 Oct 2025 18:15:28 +0200 Subject: [PATCH 18/25] update readme --- docs/collections/powersync-collection.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index e7893a040..cf72445b6 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -156,8 +156,8 @@ The `powerSyncCollectionOptions` function accepts the following options: ```ts interface PowerSyncCollectionConfig { database: PowerSyncDatabase // PowerSync database instance - tableName: string // Name of the table in PowerSync - schema?: Schema // Optional schema for validation + table: Table // PowerSync schema table definition + schema?: Schema // Optional schema for additional validation } ``` From 7030117b654b191e08397a810d9c53ec5201fd8b Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:17:46 +0200 Subject: [PATCH 19/25] Update doc comments. Code cleanup. 
--- docs/collections/powersync-collection.md | 8 +- .../src/PowerSyncTransactor.ts | 20 ++-- .../src/definitions.ts | 17 +++- .../powersync-db-collection/src/helpers.ts | 22 ++++- packages/powersync-db-collection/src/index.ts | 1 - .../powersync-db-collection/src/schema.ts | 94 +++---------------- .../tests/powersync.test.ts | 15 ++- .../tests/schema.test.ts | 90 ++++++------------ 8 files changed, 93 insertions(+), 174 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index cf72445b6..30655d14d 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -44,8 +44,6 @@ const APP_SCHEMA = new Schema({ }), }) -type Document = (typeof APP_SCHEMA)["types"]["documents"] - // Initialize PowerSync database const db = new PowerSyncDatabase({ database: { @@ -87,7 +85,7 @@ There are two ways to create a collection: using type inference or using schema #### Option 1: Using Table Type Inference -The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection data and operations. +The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection operations. ```ts import { createCollection } from "@tanstack/react-db" @@ -103,7 +101,7 @@ const documentsCollection = createCollection( #### Option 2: Using Advanced Schema Validation -Additional validations can be performed by supplying a Standard Schema. +Additional validations can be performed by supplying a Standard Schema. The typing of the validator is constrained to match the typing of the SQLite table. 
```ts import { createCollection } from "@tanstack/react-db" @@ -128,8 +126,6 @@ const documentsCollection = createCollection( ) ``` -With schema validation, the collection will validate all inputs at runtime to ensure they match the PowerSync schema types. This provides an extra layer of type safety beyond TypeScript's compile-time checks. - ## Features ### Offline-First diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 1c8b7445a..668ce48ff 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -2,10 +2,10 @@ import { sanitizeSQL } from "@powersync/common" import DebugModule from "debug" import { PendingOperationStore } from "./PendingOperationStore" import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" -import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" -import type { PendingMutation, Transaction } from "@tanstack/db" -import type { PendingOperation } from "./PendingOperationStore" import type { EnhancedPowerSyncCollectionConfig } from "./definitions" +import type { PendingOperation } from "./PendingOperationStore" +import type { PendingMutation, Transaction } from "@tanstack/db" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" const debug = DebugModule.debug(`ts/db:powersync`) @@ -24,7 +24,7 @@ export type TransactorOptions = { * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", + * table: APP_SCHEMA.props.documents, * }) * ) * @@ -239,15 +239,17 @@ export class PowerSyncTransactor> { waitForCompletion: boolean, handler: (tableName: string, mutation: PendingMutation) => Promise ): Promise { - const { tableName, trackedTableName } = ( - mutation.collection.config as EnhancedPowerSyncCollectionConfig - ).utils.getMeta() - - if (!tableName) { + if ( + 
typeof (mutation.collection.config as any).utils?.getMeta != `function` + ) { throw new Error(`Could not get tableName from mutation's collection config. The provided mutation might not have originated from PowerSync.`) } + const { tableName, trackedTableName } = ( + mutation.collection.config as unknown as EnhancedPowerSyncCollectionConfig + ).utils.getMeta() + await handler(sanitizeSQL`${tableName}`, mutation) if (!waitForCompletion) { diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 9f8e66c14..826444190 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -5,7 +5,7 @@ import type { ExtractedTable } from "./helpers" /** * Configuration interface for PowerSync collection options - * @template T - The type of items in the collection + * @template TTable - The PowerSync table schema definition * @template TSchema - The schema type for validation */ /** @@ -19,8 +19,6 @@ import type { ExtractedTable } from "./helpers" * }), * }) * - * type Document = (typeof APP_SCHEMA)["types"]["documents"] - * * const db = new PowerSyncDatabase({ * database: { * dbFilename: "test.sqlite", @@ -29,9 +27,9 @@ import type { ExtractedTable } from "./helpers" * }) * * const collection = createCollection( - * powerSyncCollectionOptions({ + * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", + * table: APP_SCHEMA.props.documents * }) * ) * ``` @@ -62,6 +60,9 @@ export type PowerSyncCollectionConfig< syncBatchSize?: number } +/** + * Meta data for the PowerSync Collection + */ export type PowerSyncCollectionMeta = { /** * The SQLite table representing the collection. 
@@ -73,6 +74,9 @@ export type PowerSyncCollectionMeta = { trackedTableName: string } +/** + * A CollectionConfig which includes utilities for PowerSync + */ export type EnhancedPowerSyncCollectionConfig< TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, @@ -82,6 +86,9 @@ export type EnhancedPowerSyncCollectionConfig< schema?: TSchema } +/** + * Collection level utilities for PowerSync + */ export type PowerSyncCollectionUtils = { getMeta: () => PowerSyncCollectionMeta } diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 76313caed..bdc3d90a7 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -9,11 +9,23 @@ export type PowerSyncRecord = { [key: string]: unknown } +/** + * Utility type: If T includes null, add undefined. + * PowerSync records typically are typed as `string | null`, where insert + * and update operations also allow not specifying a value at all (optional) + * */ +type WithUndefinedIfNull = null extends T ? T | undefined : T +type OptionalIfUndefined = { + [K in keyof T as undefined extends T[K] ? K : never]?: T[K] +} & { + [K in keyof T as undefined extends T[K] ? never : K]: T[K] +} + /** * Utility type that extracts the typed structure of a table based on its column definitions. * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. 
* - * @template Columns - The ColumnsType definition containing column configurations + * @template TTable - The PowerSync table definition * @example * ```typescript * const table = new Table({ @@ -24,11 +36,11 @@ export type PowerSyncRecord = { * // Results in: { name: string | null, age: number | null } * ``` */ -export type ExtractedTable = { - [K in keyof TTable[`columnMap`]]: ExtractColumnValueType< - TTable[`columnMap`][K] +export type ExtractedTable = OptionalIfUndefined<{ + [K in keyof TTable[`columnMap`]]: WithUndefinedIfNull< + ExtractColumnValueType > -} & { +}> & { id: string } diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 152f09076..6c8111f4c 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1,4 +1,3 @@ export * from "./definitions" export * from "./powersync" export * from "./PowerSyncTransactor" -export * from "./schema" diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts index 008a88424..c5c5750c2 100644 --- a/packages/powersync-db-collection/src/schema.ts +++ b/packages/powersync-db-collection/src/schema.ts @@ -1,5 +1,5 @@ import { ColumnType } from "@powersync/common" -import type { ColumnsType, Schema, Table } from "@powersync/common" +import type { Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" import type { ExtractedTable } from "./helpers" @@ -8,7 +8,7 @@ import type { ExtractedTable } from "./helpers" * Creates a schema that validates the structure and types of table records * according to the PowerSync table definition. 
* - * @template Columns - The ColumnsType definition containing column configurations + * @template TTable - The PowerSync schema typed Table definition * @param table - The PowerSync Table instance to convert * @returns A StandardSchemaV1 compatible schema with proper type validation * @@ -18,26 +18,17 @@ import type { ExtractedTable } from "./helpers" * name: column.text, * age: column.integer * }) - * - * const schema = convertTableToSchema(usersTable) - * // Now you can use this schema with powerSyncCollectionOptions - * const collection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "users", - * schema: schema - * }) - * ) * ``` */ -export function convertTableToSchema( - table: Table -): StandardSchemaV1> { +export function convertTableToSchema( + table: TTable +): StandardSchemaV1> { + type TExtracted = ExtractedTable // Create validate function that checks types according to column definitions const validate = ( value: unknown ): - | StandardSchemaV1.SuccessResult> + | StandardSchemaV1.SuccessResult | StandardSchemaV1.FailureResult => { if (typeof value != `object` || value == null) { return { @@ -61,7 +52,7 @@ export function convertTableToSchema( // Check each column for (const column of table.columns) { - const val = (value as ExtractedTable)[column.name] + const val = (value as TExtracted)[column.name as keyof TExtracted] if (val == null) { continue @@ -92,7 +83,7 @@ export function convertTableToSchema( return { issues } } - return { value: { ...value } as ExtractedTable } + return { value: { ...value } as TExtracted } } return { @@ -101,72 +92,9 @@ export function convertTableToSchema( vendor: `powersync`, validate, types: { - input: {} as ExtractedTable, - output: {} as ExtractedTable, + input: {} as TExtracted, + output: {} as TExtracted, }, }, } } - -/** - * Converts an entire PowerSync Schema (containing multiple tables) into a collection of StandardSchemaV1 schemas. 
- * Each table in the schema is converted to its own StandardSchemaV1 schema while preserving all type information. - * - * @template Tables - A record type mapping table names to their Table definitions - * @param schema - The PowerSync Schema containing multiple table definitions - * @returns An object where each key is a table name and each value is that table's StandardSchemaV1 schema - * - * @example - * ```typescript - * const mySchema = new Schema({ - * users: new Table({ - * name: column.text, - * age: column.integer - * }), - * posts: new Table({ - * title: column.text, - * views: column.integer - * }) - * }) - * - * const standardizedSchemas = convertSchemaToSpecs(mySchema) - * // Result has type: - * // { - * // users: StandardSchemaV1<{ name: string | null, age: number | null }>, - * // posts: StandardSchemaV1<{ title: string | null, views: number | null }> - * // } - * - * // Can be used with collections: - * const usersCollection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "users", - * schema: standardizedSchemas.users - * }) - * ) - * ``` - */ -export function convertPowerSyncSchemaToSpecs< - Tables extends Record>, ->( - schema: Schema -): { - [TableName in keyof Tables]: StandardSchemaV1< - ExtractedTable - > -} { - // Create a map to store the standardized schemas - const standardizedSchemas = {} as { - [TableName in keyof Tables]: StandardSchemaV1< - ExtractedTable - > - } - - // Iterate through each table in the schema - schema.tables.forEach((table) => { - // Convert each table to a StandardSchemaV1 and store it in the result map - ;(standardizedSchemas as any)[table.name] = convertTableToSchema(table) - }) - - return standardizedSchemas -} diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 81f973255..c12f48c3c 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ 
b/packages/powersync-db-collection/tests/powersync.test.ts @@ -27,6 +27,7 @@ const APP_SCHEMA = new Schema({ documents: new Table( { name: column.text, + author: column.text, }, { viewName: `documents` } ), @@ -110,7 +111,11 @@ describe(`PowerSync Integration`, () => { const errorMessage = `Name must be at least 3 characters` const schema = z.object({ id: z.string(), - name: z.string().min(3, { message: errorMessage }).nullable(), + name: z + .string() + .min(3, { message: errorMessage }) + .nullable() + .optional(), }) const collection = createCollection( @@ -258,12 +263,14 @@ describe(`PowerSync Integration`, () => { const tx = collection.insert({ id, name: `new`, + author: `somebody`, }) // The insert should optimistically update the collection const newDoc = collection.get(id) expect(newDoc).toBeDefined() expect(newDoc!.name).toBe(`new`) + expect(newDoc!.author).toBe(`somebody`) await tx.isPersisted.promise // The item should now be present in PowerSync @@ -276,6 +283,8 @@ describe(`PowerSync Integration`, () => { const updatedDoc = collection.get(id) expect(updatedDoc).toBeDefined() expect(updatedDoc!.name).toBe(`updatedNew`) + // Only the updated field should be updated + expect(updatedDoc!.author).toBe(`somebody`) await collection.delete(id).isPersisted.promise @@ -511,7 +520,7 @@ describe(`PowerSync Integration`, () => { }) }) - describe(`Multiple Clients`, async () => { + describe(`Multiple Clients`, () => { it(`should sync updates between multiple clients`, async () => { const db = await createDatabase() @@ -535,7 +544,7 @@ describe(`PowerSync Integration`, () => { }) }) - describe(`Lifecycle`, async () => { + describe(`Lifecycle`, () => { it(`should cleanup resources`, async () => { const db = await createDatabase() const collectionOptions = powerSyncCollectionOptions({ diff --git a/packages/powersync-db-collection/tests/schema.test.ts b/packages/powersync-db-collection/tests/schema.test.ts index 62c562b76..2dc6fb25d 100644 --- 
a/packages/powersync-db-collection/tests/schema.test.ts +++ b/packages/powersync-db-collection/tests/schema.test.ts @@ -1,9 +1,6 @@ import { Schema, Table, column } from "@powersync/common" import { describe, expect, it } from "vitest" -import { - convertPowerSyncSchemaToSpecs, - convertTableToSchema, -} from "../src/schema" +import { convertTableToSchema } from "../src/schema" import type { StandardSchemaV1 } from "@standard-schema/spec" describe(`Schema Conversion`, () => { @@ -71,6 +68,29 @@ describe(`Schema Conversion`, () => { }) }) + it(`should handle optional values correctly`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test validation with null values + const result = schema[`~standard`].validate({ + id: `123`, + name: null, + // Don't specify age + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + name: null, + }) + expect(result.value.age).undefined + }) + it(`should require id field`, () => { const table = new Table({ name: column.text, @@ -112,61 +132,6 @@ describe(`Schema Conversion`, () => { real_col: 3.14, }) }) - }) - - describe(`convertPowerSyncSchemaToSpecs`, () => { - it(`should convert multiple tables in a schema`, () => { - const schema = new Schema({ - users: new Table({ - name: column.text, - age: column.integer, - }), - posts: new Table({ - title: column.text, - views: column.integer, - }), - }) - - const result = convertPowerSyncSchemaToSpecs(schema) - - // Test structure - expect(result).toHaveProperty(`users`) - expect(result).toHaveProperty(`posts`) - - // Test users table schema - const userValidResult = result.users[`~standard`].validate({ - id: `123`, - name: `John`, - age: 25, - }) as StandardSchemaV1.SuccessResult - - expect(userValidResult.issues).toBeUndefined() - expect(userValidResult.value).toEqual({ - id: `123`, - name: `John`, - age: 25, - }) - - // Test 
posts table schema - const postValidResult = result.posts[`~standard`].validate({ - id: `456`, - title: `Hello`, - views: 100, - }) as StandardSchemaV1.SuccessResult - - expect(postValidResult.issues).toBeUndefined() - expect(postValidResult.value).toEqual({ - id: `456`, - title: `Hello`, - views: 100, - }) - }) - - it(`should handle empty schema`, () => { - const schema = new Schema({}) - const result = convertPowerSyncSchemaToSpecs(schema) - expect(result).toEqual({}) - }) it(`should validate each table independently`, () => { const schema = new Schema({ @@ -178,15 +143,16 @@ describe(`Schema Conversion`, () => { }), }) - const result = convertPowerSyncSchemaToSpecs(schema) + const usersSchema = convertTableToSchema(schema.props.users) + const postsSchema = convertTableToSchema(schema.props.posts) // Test that invalid data in one table doesn't affect the other - const userInvalidResult = result.users[`~standard`].validate({ + const userInvalidResult = usersSchema[`~standard`].validate({ id: `123`, name: 42, // wrong type }) as StandardSchemaV1.FailureResult - const postValidResult = result.posts[`~standard`].validate({ + const postValidResult = postsSchema[`~standard`].validate({ id: `456`, views: 100, }) as StandardSchemaV1.SuccessResult From 829ce64011cb416879ab067958609879592fc595 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:21:15 +0200 Subject: [PATCH 20/25] More doc cleanup --- .../src/PendingOperationStore.ts | 2 +- .../src/PowerSyncTransactor.ts | 32 +++++++++---------- .../src/definitions.ts | 16 +++++----- .../powersync-db-collection/src/helpers.ts | 10 +++--- .../powersync-db-collection/src/powersync.ts | 18 +++++------ .../powersync-db-collection/src/schema.ts | 4 +-- 6 files changed, 41 insertions(+), 41 deletions(-) diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts index c804067b3..80edf3c14 100644 --- 
a/packages/powersync-db-collection/src/PendingOperationStore.ts +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -13,7 +13,7 @@ export type PendingOperation = { * Optimistic mutations have their optimistic state discarded once transactions have * been applied. * We need to ensure that an applied transaction has been observed by the sync diff trigger - * before resoling the transaction application call. + * before resolving the transaction application call. * This store allows registering a wait for a pending operation to have been observed. */ export class PendingOperationStore { diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 668ce48ff..262ad6e4d 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -1,11 +1,11 @@ import { sanitizeSQL } from "@powersync/common" import DebugModule from "debug" -import { PendingOperationStore } from "./PendingOperationStore" import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" +import { PendingOperationStore } from "./PendingOperationStore" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" +import type { PendingMutation, Transaction } from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" -import type { PendingMutation, Transaction } from "@tanstack/db" -import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" const debug = DebugModule.debug(`ts/db:powersync`) @@ -29,16 +29,16 @@ export type TransactorOptions = { * ) * * const addTx = createTransaction({ - * autoCommit: false, - * mutationFn: async ({ transaction }) => { - * await new PowerSyncTransactor({database: db}).applyTransaction(transaction) - * }, + * autoCommit: false, + * mutationFn: async ({ transaction 
}) => { + * await new PowerSyncTransactor({ database: db }).applyTransaction(transaction) + * }, * }) * * addTx.mutate(() => { - * for (let i = 0; i < 5; i++) { - * collection.insert({ id: randomUUID(), name: `tx-${i}` }) - * } + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } * }) * * await addTx.commit() @@ -58,7 +58,7 @@ export class PowerSyncTransactor> { } /** - * Persists a {@link Transaction} to PowerSync's SQLite DB. + * Persists a {@link Transaction} to the PowerSync SQLite database. */ async applyTransaction(transaction: Transaction) { const { mutations } = transaction @@ -67,8 +67,8 @@ export class PowerSyncTransactor> { return } /** - * The transaction might contain ops for different collections. - * We can do some optimizations for single collection transactions. + * The transaction might contain operations for different collections. + * We can do some optimizations for single-collection transactions. */ const mutationsCollectionIds = mutations.map( (mutation) => mutation.collection.id @@ -103,7 +103,7 @@ export class PowerSyncTransactor> { for (const [index, mutation] of mutations.entries()) { /** * Each collection processes events independently. We need to make sure the - * last operation for each collection has been seen. + * last operation for each collection has been observed. */ const shouldWait = index == lastCollectionMutationIndexes.get(mutation.collection.id) @@ -130,7 +130,7 @@ export class PowerSyncTransactor> { * Return a promise from the writeTransaction, without awaiting it. * This promise will resolve once the entire transaction has been * observed via the diff triggers. - * We return without awaiting in order to free the writeLock. + * We return without awaiting in order to free the write lock. 
*/ return { whenComplete: Promise.all( @@ -231,7 +231,7 @@ export class PowerSyncTransactor> { * Helper function which wraps a persistence operation by: * - Fetching the mutation's collection's SQLite table details * - Executing the mutation - * - Returning the last pending diff op if required + * - Returning the last pending diff operation if required */ protected async handleOperationWithCompletion( mutation: PendingMutation, diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 826444190..5c3b63d33 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -4,9 +4,9 @@ import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" import type { ExtractedTable } from "./helpers" /** - * Configuration interface for PowerSync collection options + * Configuration interface for PowerSync collection options. * @template TTable - The PowerSync table schema definition - * @template TSchema - The schema type for validation + * @template TSchema - The validation schema type */ /** * Configuration options for creating a PowerSync collection. @@ -41,7 +41,7 @@ export type PowerSyncCollectionConfig< BaseCollectionConfig, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` > & { - /** The PowerSync Schema Table definition */ + /** The PowerSync schema Table definition */ table: TTable /** The PowerSync database instance */ database: AbstractPowerSyncDatabase @@ -61,7 +61,7 @@ export type PowerSyncCollectionConfig< } /** - * Meta data for the PowerSync Collection + * Metadata for the PowerSync Collection. */ export type PowerSyncCollectionMeta = { /** @@ -69,13 +69,13 @@ export type PowerSyncCollectionMeta = { */ tableName: string /** - * The internal table used to track diff for the collection. + * The internal table used to track diffs for the collection. 
*/ trackedTableName: string } /** - * A CollectionConfig which includes utilities for PowerSync + * A CollectionConfig which includes utilities for PowerSync. */ export type EnhancedPowerSyncCollectionConfig< TTable extends Table = Table, @@ -87,13 +87,13 @@ export type EnhancedPowerSyncCollectionConfig< } /** - * Collection level utilities for PowerSync + * Collection-level utilities for PowerSync. */ export type PowerSyncCollectionUtils = { getMeta: () => PowerSyncCollectionMeta } /** - * Default value for {@link PowerSyncCollectionConfig#syncBatchSize} + * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}. */ export const DEFAULT_BATCH_SIZE = 1000 diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index bdc3d90a7..7d23dc171 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -2,7 +2,7 @@ import { DiffTriggerOperation } from "@powersync/common" import type { ExtractColumnValueType, Table } from "@powersync/common" /** - * All PowerSync table records have a uuid `id` column. + * All PowerSync table records include a UUID `id` column. */ export type PowerSyncRecord = { id: string @@ -10,10 +10,10 @@ export type PowerSyncRecord = { } /** - * Utility type: If T includes null, add undefined. - * PowerSync records typically are typed as `string | null`, where insert - * and update operations also allow not specifying a value at all (optional) - * */ + * Utility type: If T includes null, also allow undefined (to support optional fields in insert/update operations). + * PowerSync records are typically typed as `string | null`, where insert + * and update operations may also allow not specifying a value at all (optional). + */ type WithUndefinedIfNull = null extends T ? T | undefined : T type OptionalIfUndefined = { [K in keyof T as undefined extends T[K] ? 
K : never]?: T[K] diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index cba14eda6..52340cbb9 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -16,10 +16,10 @@ import type { ExtractedTable } from "./helpers" import type { PendingOperation } from "./PendingOperationStore" /** - * Creates PowerSync collection options for use with a standard Collection + * Creates PowerSync collection options for use with a standard Collection. * - * @template TTable - The SQLite based typing - * @template TSchema - The schema type for validation - optionally supports a custom input type + * @template TTable - The SQLite-based typing + * @template TSchema - The validation schema type (optionally supports a custom input type) * @param config - Configuration options for the PowerSync collection * @returns Collection options with utilities */ @@ -66,7 +66,7 @@ export function powerSyncCollectionOptions( * ```typescript * import { z } from "zod" * - * // The PowerSync SQLite Schema + * // The PowerSync SQLite schema * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, @@ -76,8 +76,8 @@ export function powerSyncCollectionOptions( * // Advanced Zod validations. The output type of this schema * // is constrained to the SQLite schema of APP_SCHEMA * const schema = z.object({ - * id: z.string(), - * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), + * id: z.string(), + * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), * }) * * const collection = createCollection( @@ -122,9 +122,9 @@ export function powerSyncCollectionOptions< // We can do basic runtime validations for columns if not explicit schema has been provided const schema = inputSchema ?? 
(convertTableToSchema(table) as TSchema) /** - * The onInsert, onUpdate, onDelete handlers should only return - * after we have written the changes to Tanstack DB. - * We currently only write to Tanstack DB from a diff trigger. + * The onInsert, onUpdate, and onDelete handlers should only return + * after we have written the changes to TanStack DB. + * We currently only write to TanStack DB from a diff trigger. * We wait for the diff trigger to observe the change, * and only then return from the on[X] handlers. * This ensures that when the transaction is reported as diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts index c5c5750c2..ca56c71b4 100644 --- a/packages/powersync-db-collection/src/schema.ts +++ b/packages/powersync-db-collection/src/schema.ts @@ -8,9 +8,9 @@ import type { ExtractedTable } from "./helpers" * Creates a schema that validates the structure and types of table records * according to the PowerSync table definition. 
* - * @template TTable - The PowerSync schema typed Table definition + * @template TTable - The PowerSync schema-typed Table definition * @param table - The PowerSync Table instance to convert - * @returns A StandardSchemaV1 compatible schema with proper type validation + * @returns A StandardSchemaV1-compatible schema with proper type validation * * @example * ```typescript From dd0cbc8aa120c7c2c1c4b34cd1b0aad7d7d7c95a Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:25:09 +0200 Subject: [PATCH 21/25] README cleanup --- docs/collections/powersync-collection.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 30655d14d..492d4bde7 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -85,7 +85,7 @@ There are two ways to create a collection: using type inference or using schema #### Option 1: Using Table Type Inference -The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection operations. +The collection types are automatically inferred from the PowerSync schema table definition. The table is used to construct a default standard schema validator which is used internally to validate collection operations. ```ts import { createCollection } from "@tanstack/react-db" @@ -101,20 +101,20 @@ const documentsCollection = createCollection( #### Option 2: Using Advanced Schema Validation -Additional validations can be performed by supplying a Standard Schema. The typing of the validator is constrained to match the typing of the SQLite table. +Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The typing of the validator is constrained to match the typing of the SQLite table. 
```ts import { createCollection } from "@tanstack/react-db" -import { - powerSyncCollectionOptions, - convertPowerSyncSchemaToSpecs, -} from "@tanstack/powersync-db-collection" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" import { z } from "zod" -// The output of this schema must correspond to the SQLite schema +// The output of this schema must match the SQLite schema const schema = z.object({ id: z.string(), - name: z.string().min(3, { message: errorMessage }).nullable(), + name: z + .string() + .min(3, { message: "Should be at least 3 characters" }) + .nullable(), }) const documentsCollection = createCollection( @@ -153,7 +153,7 @@ The `powerSyncCollectionOptions` function accepts the following options: interface PowerSyncCollectionConfig { database: PowerSyncDatabase // PowerSync database instance table: Table // PowerSync schema table definition - schema?: Schema // Optional schema for additional validation + schema?: StandardSchemaV1 // Optional schema for additional validation (e.g., Zod schema) } ``` From e26bf27421a3e9e6d2ab6038c298010d01361fe3 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:35:08 +0200 Subject: [PATCH 22/25] Cleanup tests --- packages/powersync-db-collection/package.json | 2 +- packages/powersync-db-collection/tests/powersync.test.ts | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index f5af86b27..10e20d84e 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -5,7 +5,7 @@ "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", - "@tanstack/store": "^0.7.7", + "@tanstack/store": "^0.8.0", "debug": "^4.4.3", "p-defer": "^4.0.1" }, diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 
c12f48c3c..47c0dad64 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -96,7 +96,6 @@ describe(`PowerSync Integration`, () => { // should validate inputs try { collection.insert({} as any) - console.log(`failed`) } catch (ex) { expect(ex instanceof SchemaValidationError).true if (ex instanceof SchemaValidationError) { @@ -136,7 +135,6 @@ describe(`PowerSync Integration`, () => { } catch (ex) { expect(ex instanceof SchemaValidationError).true if (ex instanceof SchemaValidationError) { - console.log(ex) expect(ex.message).contains(errorMessage) } } @@ -151,11 +149,9 @@ describe(`PowerSync Integration`, () => { // should validate inputs try { collection.insert({} as any) - console.log(`failed`) } catch (ex) { expect(ex instanceof SchemaValidationError).true if (ex instanceof SchemaValidationError) { - console.log(ex) expect(ex.message).contains(`Required - path: id`) } } From e207268c598d868b0fc9cfa64e6790ed90ddc82a Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:35:44 +0200 Subject: [PATCH 23/25] Update PowerSync dependencies --- pnpm-lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7d36fdb81..0e74bbc01 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -659,8 +659,8 @@ importers: specifier: workspace:* version: link:../db '@tanstack/store': - specifier: ^0.7.7 - version: 0.7.7 + specifier: ^0.8.0 + version: 0.8.0 debug: specifier: ^4.4.3 version: 4.4.3 From e94cadfc066cb7db2f0b8d770f4dcc4816c6dc33 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 12:04:55 +0200 Subject: [PATCH 24/25] Properly constrain types --- packages/powersync-db-collection/src/powersync.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 52340cbb9..b5d440d74 100644 --- 
a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -91,7 +91,10 @@ export function powerSyncCollectionOptions( */ export function powerSyncCollectionOptions< TTable extends Table, - TSchema extends StandardSchemaV1, any>, + TSchema extends StandardSchemaV1< + ExtractedTable, + ExtractedTable + >, >( config: PowerSyncCollectionConfig ): CollectionConfig, string, TSchema> & { From b7fc0ffa87ab4231f7505d0255c2a672ff7eca86 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 12:44:46 +0200 Subject: [PATCH 25/25] Allow custom input schema types --- docs/collections/powersync-collection.md | 7 ++--- .../powersync-db-collection/src/powersync.ts | 13 +++++---- .../tests/powersync.test.ts | 29 +++++++++++++++++++ 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 492d4bde7..5cea78c2f 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -101,7 +101,7 @@ const documentsCollection = createCollection( #### Option 2: Using Advanced Schema Validation -Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The typing of the validator is constrained to match the typing of the SQLite table. +Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The output typing of the validator is constrained to match the typing of the SQLite table. The input typing can be arbitrary. 
```ts import { createCollection } from "@tanstack/react-db" @@ -111,10 +111,7 @@ import { z } from "zod" // The output of this schema must match the SQLite schema const schema = z.object({ id: z.string(), - name: z - .string() - .min(3, { message: "Should be at least 3 characters" }) - .nullable(), + name: z.string().min(3, { message: "Should be at least 3 characters" }), }) const documentsCollection = createCollection( diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index b5d440d74..882deb1b1 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -6,7 +6,11 @@ import { PowerSyncTransactor } from "./PowerSyncTransactor" import { convertTableToSchema } from "./schema" import type { Table, TriggerDiffRecord } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { CollectionConfig, SyncConfig } from "@tanstack/db" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, @@ -91,13 +95,10 @@ export function powerSyncCollectionOptions( */ export function powerSyncCollectionOptions< TTable extends Table, - TSchema extends StandardSchemaV1< - ExtractedTable, - ExtractedTable - >, + TSchema extends StandardSchemaV1>, >( config: PowerSyncCollectionConfig -): CollectionConfig, string, TSchema> & { +): CollectionConfig, string, TSchema> & { utils: PowerSyncCollectionUtils schema: TSchema } diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 47c0dad64..8a7a06c6b 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -156,6 +156,35 @@ describe(`PowerSync Integration`, () => { } } }) + + it(`should allow custom input types`, async 
() => { + const db = await createDatabase() + + // The input can be arbitrarily typed, as long as it converts to SQLite + const schema = z.object({ + id: z.string(), + name: z.number().transform((val) => `Number: ${val}`), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) + ) + onTestFinished(() => collection.cleanup()) + + const id = randomUUID() + collection.insert({ + id, + name: 42, + }) + + const item = collection.get(id) + + expect(item?.name).eq(`Number: 42`) + }) }) describe(`sync`, () => {