From 4f22fe6fb75c208623b05115ccbcf9900d5808c5 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Fri, 6 Feb 2026 15:18:47 +0200 Subject: [PATCH 01/15] PoC. --- .../research/ARCHITECTURE.md | 608 ++++++++++++++++++ .../on-demand-sync-implementation-plan.md | 351 ++++++++++ .../research/on-demand-sync-research.md | 329 ++++++++++ .../research/writeup.md | 102 +++ packages/powersync-db-collection/src/index.ts | 1 + .../powersync-db-collection/src/powersync.ts | 89 ++- .../src/sqlite-compiler.ts | 273 ++++++++ .../tests/on-demand-sync.test.ts | 139 ++++ .../tests/sqlite-compiler.test.ts | 255 ++++++++ 9 files changed, 2140 insertions(+), 7 deletions(-) create mode 100644 packages/powersync-db-collection/research/ARCHITECTURE.md create mode 100644 packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md create mode 100644 packages/powersync-db-collection/research/on-demand-sync-research.md create mode 100644 packages/powersync-db-collection/research/writeup.md create mode 100644 packages/powersync-db-collection/src/sqlite-compiler.ts create mode 100644 packages/powersync-db-collection/tests/on-demand-sync.test.ts create mode 100644 packages/powersync-db-collection/tests/sqlite-compiler.test.ts diff --git a/packages/powersync-db-collection/research/ARCHITECTURE.md b/packages/powersync-db-collection/research/ARCHITECTURE.md new file mode 100644 index 000000000..f77b34c0e --- /dev/null +++ b/packages/powersync-db-collection/research/ARCHITECTURE.md @@ -0,0 +1,608 @@ +# TanStack DB & PowerSync Integration: Architecture Deep Dive + +## Table of Contents + +1. [TanStack DB Overview](#1-tanstack-db-overview) +2. [Core Concepts](#2-core-concepts) +3. [Collection Architecture](#3-collection-architecture) +4. [State Management & Virtual Derived State](#4-state-management--virtual-derived-state) +5. [Transaction System](#5-transaction-system) +6. [Sync Protocol](#6-sync-protocol) +7. [Query System](#7-query-system) +8. [Indexing](#8-indexing) +9. 
[PowerSync Integration](#9-powersync-integration) +10. [End-to-End Data Flow](#10-end-to-end-data-flow) + +--- + +## 1. TanStack DB Overview + +TanStack DB is a **reactive, client-side data store** that provides: + +- **Normalized, collection-based data management** — data lives in typed collections keyed by a primary key. +- **Live queries** — queries automatically re-evaluate when underlying data changes. +- **Optimistic mutations** — writes apply to the UI instantly; the sync layer persists them asynchronously. +- **Pluggable sync backends** — PowerSync, ElectricSQL, RxDB, local storage, or custom adapters. +- **Framework adapters** — React, Vue, Svelte, Solid, Angular. + +The core library (`@tanstack/db`) is framework- and backend-agnostic. Backend packages like `@tanstack/powersync-db-collection` provide a `collectionOptions` factory that wires up sync, mutations, and schema conversion for a specific backend. + +### Package Map + +``` +packages/ +├── db/ # Core library (@tanstack/db) +├── db-ivm/ # Incremental View Maintenance engine +├── powersync-db-collection/ # PowerSync adapter ← this package +├── electric-db-collection/ # ElectricSQL adapter +├── rxdb-db-collection/ # RxDB adapter +├── query-db-collection/ # Generic query-based adapter +├── offline-transactions/ # Offline transaction queue +├── react-db/ # React bindings +├── vue-db/ # Vue bindings +├── svelte-db/ # Svelte bindings +├── solid-db/ # Solid bindings +└── angular-db/ # Angular bindings +``` + +--- + +## 2. Core Concepts + +### Collections + +A **Collection** is a typed, keyed store of objects — analogous to a database table. Every item has a unique key (usually `id`). Collections are the fundamental unit of data in TanStack DB. + +```typescript +const todos = createCollection({ + id: 'todos', + getKey: (todo) => todo.id, + schema: todoSchema, + sync: { /* backend-specific */ }, +}) +``` + +### Transactions + +All mutations (insert/update/delete) produce a **Transaction**. 
Transactions are optimistic — they apply to the UI immediately — and carry deferred promises (`isPersisted`, `isApplied`) that resolve when the backend confirms the write. + +### Sync + +The **sync layer** is a callback protocol. The backend calls `begin()` / `write()` / `commit()` / `markReady()` to push changes into the collection. The collection merges synced data with optimistic state to produce the visible state. + +### Live Queries + +Queries are defined with a SQL-like builder and compiled to an intermediate representation (IR). They produce **derived collections** that auto-update when source collections change. + +--- + +## 3. Collection Architecture + +A Collection (`packages/db/src/collection/index.ts`) delegates its responsibilities to seven specialized managers: + +``` +CollectionImpl +├── CollectionStateManager — holds synced data, optimistic mutations, virtual derived state +├── CollectionSyncManager — coordinates sync lifecycle and batching +├── CollectionMutationsManager — handles insert/update/delete operations +├── CollectionEventsManager — typed event emission (status changes, subscriber changes) +├── CollectionChangesManager — manages subscriptions and change propagation +├── CollectionLifecycleManager — status transitions (idle → loading → ready), GC +└── CollectionIndexesManager — auto-indexes and manual index management +``` + +### Status Lifecycle + +``` +idle ──→ loading ──→ ready ──→ cleaned-up + │ ↑ + └──→ error ──────────┘ +``` + +- **idle**: Created but no subscribers yet. +- **loading**: Sync started, waiting for `markReady()`. +- **ready**: Initial data loaded, queries can run. +- **error**: Sync or lifecycle error. +- **cleaned-up**: All subscribers gone, GC ran. + +### Lazy Activation + +Collections use a **lazy activation** pattern: sync only starts when the first subscriber appears. When all subscribers leave, a GC timeout starts. If no new subscriber arrives, the collection cleans up. + +--- + +## 4. 
State Management & Virtual Derived State + +**File**: `packages/db/src/collection/state.ts` + +The state manager holds three layers of data: + +``` +┌─────────────────────────────┐ +│ Visible (virtual) state │ ← what queries and UI see +├─────────────────────────────┤ +│ Optimistic upserts/deletes │ ← from local transactions not yet confirmed +├─────────────────────────────┤ +│ Synced data │ ← confirmed state from the backend +└─────────────────────────────┘ +``` + +**Key data structures:** + +| Field | Type | Purpose | +|---|---|---| +| `syncedData` | `SortedMap` | Backend-confirmed rows | +| `syncedMetadata` | `Map` | Per-row sync metadata | +| `optimisticUpserts` | `Map` | Locally inserted/updated rows | +| `optimisticDeletes` | `Set` | Locally deleted keys | +| `transactions` | `SortedMap` | Active transactions | + +**Virtual derivation**: When you call `state.get(key)`, it checks optimistic deletes first, then optimistic upserts, then synced data. There is no materialized "merged" copy — it's computed on read, avoiding double-bookkeeping. + +### SortedMap + +`SortedMap` (`packages/db/src/SortedMap.ts`) wraps a `Map` with a sorted key array for deterministic iteration. Insertions use binary search for O(log n) positioning. + +--- + +## 5. 
Transaction System + +**File**: `packages/db/src/transactions.ts` + +### Mutation Types + +```typescript +type PendingMutation = { + type: 'insert' | 'update' | 'delete' + key: string | number + value?: T // for insert + changes?: Partial // for update + previousValue?: T // for rollback +} +``` + +### Mutation Merging + +When multiple mutations target the same key within a transaction, they merge according to this truth table: + +| Existing | New | Result | +|---|---|---| +| insert | update | insert (merged) | +| insert | delete | both cancelled | +| update | update | update (last wins, changes unioned) | +| update | delete | delete | +| delete | insert | update | +| delete | delete | delete | + +### Transaction Lifecycle + +``` +pending ──→ persisting ──→ completed + │ + └──→ failed (rollback optimistic state) +``` + +Each transaction carries two deferred promises: +- **`isPersisted`** — resolves when the backend confirms the write. +- **`isApplied`** — resolves when the synced data reflects the change. + +--- + +## 6. Sync Protocol + +**File**: `packages/db/src/collection/sync.ts` + +The sync config is a callback that receives control functions: + +```typescript +sync: { + sync: ({ collection, begin, write, commit, markReady, truncate }) => { + // 1. Initial load + begin() + for (const row of initialData) { + write({ type: 'insert', key: row.id, value: row }) + } + commit() + markReady() // signals collection is ready + + // 2. 
Live changes + onChange((change) => { + begin() + write(change) + commit() + }) + + return () => { /* cleanup */ } + } +} +``` + +| Function | Purpose | +|---|---| +| `begin()` | Start a sync transaction | +| `write(msg)` | Add a change message (insert/update/delete) | +| `commit()` | Finalize the transaction, apply to synced state | +| `markReady()` | Signal initial load complete — collection transitions to "ready" | +| `truncate()` | Clear all synced data (used on 409 / must-refetch) | + +### Change Messages + +```typescript +type ChangeMessage = { + type: 'insert' | 'update' | 'delete' + key: TKey + value: T // for insert/update + previousValue?: T // for update + metadata?: unknown +} +``` + +--- + +## 7. Query System + +### Query IR + +**File**: `packages/db/src/query/ir.ts` + +Queries are represented as an intermediate representation (IR): + +```typescript +interface QueryIR { + from: CollectionRef | QueryRef + select?: SelectObject | AggregateSelect + join?: JoinClause[] + where?: WhereExpression[] + groupBy?: GroupByExpression + having?: HavingExpression[] + orderBy?: OrderByExpression + limit?: number + offset?: number + distinct?: true + singleResult?: true +} +``` + +### Builder API + +```typescript +const result = query + .from({ todos }) + .select({ id: todos.id, title: todos.title }) + .where(todos.completed.eq(true)) + .orderBy(todos.createdAt.desc()) + .limit(10) + .key(({ id }) => id) +``` + +### Live Query Collections + +Queries produce **derived collections** that update incrementally when source data changes. This is powered by the `@tanstack/db-ivm` (Incremental View Maintenance) package. + +--- + +## 8. Indexing + +**File**: `packages/db/src/indexes/` + +Collections support automatic and manual indexes: + +- **BTree Index**: For range queries and sorted iteration (`>`, `<`, `>=`, `<=`, `between`). +- **Hash Index**: For exact-match lookups (`=`, `in`). +- **Lazy Index**: Deferred building — the index isn't materialized until first queried. 
+ +Auto-indexing can be `'eager'` (build immediately) or `'lazy'` (build on first query). + +--- + +## 9. PowerSync Integration + +**Package**: `@tanstack/powersync-db-collection` + +This package bridges TanStack DB with [PowerSync](https://www.powersync.com/), a sync layer that uses SQLite as the local database and provides offline-first sync with a PostgreSQL backend. + +### 9.1 Entry Point: `powerSyncCollectionOptions()` + +**File**: `packages/powersync-db-collection/src/powersync.ts` + +This factory function takes a PowerSync table + database and returns a complete TanStack DB collection config with sync, mutations, and schema handling wired up. + +```typescript +import { powerSyncCollectionOptions } from '@tanstack/powersync-db-collection' +import { createCollection } from '@tanstack/db' + +const todos = createCollection( + powerSyncCollectionOptions({ + database: powerSyncDb, + table: AppSchema.props.todos, + schema: zodTodoSchema, // optional — enables type transforms + }) +) +``` + +**What it generates:** + +| Config field | Generated value | +|---|---| +| `id` | Table name from PowerSync schema | +| `getKey` | `(row) => row.id` (PowerSync rows always have `id: string`) | +| `schema` | Converts PowerSync table columns to a StandardSchema validator, or uses the user-provided schema | +| `onInsert` | Delegates to `PowerSyncTransactor` | +| `onUpdate` | Delegates to `PowerSyncTransactor` | +| `onDelete` | Delegates to `PowerSyncTransactor` | +| `sync.sync` | Sets up diff trigger observation and initial data loading | +| `utils.getMeta()` | Returns table name, tracked table name, serialization info | + +### 9.2 Schema Conversion + +**File**: `packages/powersync-db-collection/src/schema.ts` + +PowerSync tables define columns with SQLite types (`TEXT`, `INTEGER`, `REAL`). 
The package converts these to a StandardSchema validator: + +``` +PowerSync Column Type → Validation Rule +TEXT → typeof value === 'string' +INTEGER → typeof value === 'number' && Number.isInteger(value) +REAL → typeof value === 'number' +``` + +Every table also gets an `id: string` field validated. + +When a user provides their own schema (e.g., Zod), the package uses that instead and handles serialization/deserialization between the rich TypeScript types and SQLite storage types. + +### 9.3 Serialization + +**File**: `packages/powersync-db-collection/src/serialization.ts` + +SQLite only stores TEXT, INTEGER, and REAL. When the user's schema has richer types (Date, boolean, nested objects), serialization converts them: + +| User Type | SQLite Type | Serialization | +|---|---|---| +| `string` | TEXT | as-is | +| `Date` | TEXT | `.toISOString()` | +| `object/array` | TEXT | `JSON.stringify()` | +| `number` | INTEGER/REAL | as-is | +| `boolean` | INTEGER | `true → 1`, `false → 0` | + +Custom serializers can be provided per-field: + +```typescript +powerSyncCollectionOptions({ + database: db, + table: schema.props.events, + schema: eventSchema, + serializer: { + startDate: (date: Date) => date.getTime(), // custom + }, +}) +``` + +### 9.4 Sync Implementation + +The sync function generated by `powerSyncCollectionOptions` does the following: + +#### Initial Load + +1. Query SQLite for all rows in the table, batched (`syncBatchSize`, default 1000). +2. For each batch, call `begin()` → `write()` for each row → `commit()`. +3. After all rows loaded, call `markReady()`. + +#### Live Change Observation + +PowerSync provides a **diff trigger** system. When data changes in SQLite (from sync or local writes), diff triggers fire with the operation type and affected row. + +The sync function: +1. Registers a diff trigger listener on the tracked table. +2. On each diff trigger event, reads the current row from SQLite. +3. 
Calls `begin()` → `write({ type, key, value })` → `commit()`. + +The tracked table name follows the pattern: if the table is `todos`, the tracked table is `ps_tracked__todos` (managed by PowerSync internally). + +#### Handling "Must Refetch" (409) + +If PowerSync signals a full re-sync is needed, the sync function calls `truncate()` to clear all synced data, then re-runs the initial load. + +### 9.5 PowerSyncTransactor + +**File**: `packages/powersync-db-collection/src/PowerSyncTransactor.ts` + +The transactor handles **outbound mutations** (user writes that need to persist to SQLite). + +#### Flow + +``` +User calls collection.insert({ title: 'Buy milk' }) + │ + ▼ +Transaction created (optimistic state applied immediately) + │ + ▼ +PowerSyncTransactor.applyTransaction(transaction) + │ + ├── Wait for all affected collections to be "ready" + │ + ├── Group mutations by collection + │ + ├── Execute in a PowerSync write transaction: + │ ├── INSERT INTO todos (id, title) VALUES (?, ?) + │ ├── UPDATE todos SET title = ? WHERE id = ? + │ └── DELETE FROM todos WHERE id = ? + │ + ├── Create PendingOperations for each mutation + │ + └── Return promise that resolves when diff triggers observe the changes +``` + +#### Key Detail: Waiting for Diff Triggers + +After writing to SQLite, the transactor doesn't resolve immediately. It creates `PendingOperation` entries and waits for the diff trigger observer (in the sync function) to see those same changes. This ensures the synced state in the collection is up-to-date before the transaction's `isPersisted` promise resolves. 
+ +### 9.6 PendingOperationStore + +**File**: `packages/powersync-db-collection/src/PendingOperationStore.ts` + +A global singleton that bridges outbound mutations with the sync observer: + +``` +Transactor Sync Observer + │ │ + ├── store.waitFor(operation) ──────► │ + │ (creates deferred) │ + │ ├── store.resolvePendingFor(operations) + │ │ (resolves deferred) + ◄─────────────── resolved ────────────┘ +``` + +A `PendingOperation` contains: +- `tableName` — which table was modified +- `operation` — insert/update/delete +- `id` — the row ID +- `timestamp` — when the operation was created + +### 9.7 Type System + +PowerSync tables have a specific column structure. The package provides type helpers to bridge PowerSync's SQLite types with TypeScript: + +```typescript +// PowerSync table → TypeScript record type +type ExtractedTable = { + [K in keyof TTable['columnMap']]: SQLiteColumnType +} & { id: string } + +// When a user schema is provided, output type comes from the schema +type InferPowerSyncOutputType = + TSchema extends StandardSchemaV1 + ? StandardSchemaV1.InferOutput + : ExtractedTable +``` + +Three overloads of `powerSyncCollectionOptions` handle the three cases: +1. **No schema** — types are raw SQLite types (string | number | null). +2. **Schema with SQLite-compatible input** — schema validates but input types match SQLite. +3. **Schema with rich types** — schema has non-SQLite types (Date, boolean, etc.), serialization required. + +--- + +## 10. 
End-to-End Data Flow + +### Reading Data (Sync → UI) + +``` +PostgreSQL backend + │ + ▼ +PowerSync sync service + │ + ▼ +Local SQLite database + │ + ├── Diff triggers fire + │ + ▼ +Sync observer in powerSyncCollectionOptions + │ + ├── begin() → write(changeMessage) → commit() + │ + ▼ +CollectionStateManager.syncedData updated + │ + ▼ +CollectionChangesManager notifies subscribers + │ + ▼ +Live queries re-evaluate (via IVM) + │ + ▼ +React/Vue/Svelte component re-renders +``` + +### Writing Data (UI → Backend) + +``` +User action (e.g., click "Add Todo") + │ + ▼ +collection.insert({ id: uuid(), title: 'Buy milk' }) + │ + ├── Optimistic state applied immediately + │ └── UI re-renders with new todo + │ + ├── Transaction created (state: 'pending') + │ + ▼ +PowerSyncTransactor.applyTransaction() + │ + ├── Executes SQL: INSERT INTO todos ... + │ + ├── Creates PendingOperation + │ + ├── Transaction state → 'persisting' + │ + ▼ +Diff trigger fires (SQLite observed the insert) + │ + ├── Sync observer writes to collection + │ └── syncedData now contains the row + │ + ├── PendingOperationStore resolves + │ + ├── Optimistic state cleared for this key + │ + ├── Transaction state → 'completed' + │ + ▼ +transaction.isPersisted.promise resolves + │ + ▼ +PowerSync upload queue sends to backend + │ + ▼ +PostgreSQL backend persists the row +``` + +### Conflict Resolution + +When a sync update arrives for a key that has an optimistic mutation: +1. The synced data is updated. +2. The optimistic mutation remains active until its transaction completes. +3. The UI sees the optimistic version (optimistic takes priority). +4. When the transaction completes, the optimistic layer clears and the synced version becomes visible. + +If the backend rejects a write, the transaction moves to `'failed'`, the optimistic state rolls back, and the UI reverts to the synced version. 
+ +--- + +## Key Files Reference + +### Core TanStack DB + +| File | Purpose | +|---|---| +| `packages/db/src/collection/index.ts` | Collection class, manager orchestration | +| `packages/db/src/collection/state.ts` | State manager, virtual derived state | +| `packages/db/src/collection/sync.ts` | Sync coordination | +| `packages/db/src/collection/mutations.ts` | Insert/update/delete handling | +| `packages/db/src/collection/events.ts` | Event types and emission | +| `packages/db/src/collection/changes.ts` | Subscription management | +| `packages/db/src/collection/lifecycle.ts` | Status transitions, GC | +| `packages/db/src/collection/subscription.ts` | Individual subscription tracking | +| `packages/db/src/transactions.ts` | Transaction class, mutation merging | +| `packages/db/src/query/ir.ts` | Query intermediate representation | +| `packages/db/src/query/builder/index.ts` | Fluent query builder | +| `packages/db/src/query/live-query-collection.ts` | Derived live collections | +| `packages/db/src/indexes/base-index.ts` | Index interface | +| `packages/db/src/SortedMap.ts` | Deterministic-order map | +| `packages/db/src/proxy.ts` | Change-tracking proxy for updates | +| `packages/db/src/types.ts` | All core type definitions | + +### PowerSync Integration + +| File | Purpose | +|---|---| +| `packages/powersync-db-collection/src/powersync.ts` | `powerSyncCollectionOptions()` factory | +| `packages/powersync-db-collection/src/PowerSyncTransactor.ts` | Outbound mutation handler | +| `packages/powersync-db-collection/src/PendingOperationStore.ts` | Bridges mutations ↔ sync observation | +| `packages/powersync-db-collection/src/schema.ts` | PowerSync table → StandardSchema conversion | +| `packages/powersync-db-collection/src/serialization.ts` | Rich types ↔ SQLite type conversion | +| `packages/powersync-db-collection/src/definitions.ts` | Type definitions, serializer config | +| `packages/powersync-db-collection/src/helpers.ts` | Type extraction, operation mapping | 
diff --git a/packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md b/packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md new file mode 100644 index 000000000..e1338f0bc --- /dev/null +++ b/packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md @@ -0,0 +1,351 @@ +# Implementation Plan: `syncMode: 'on-demand'` for powersync-db-collection + +## Overview + +Add query-driven sync support to the PowerSync TanStack DB collection adapter. This enables loading data from the local PowerSync SQLite database into TanStack DB collections on-demand based on query predicates, rather than eagerly loading all data upfront. + +## Background + +### Current State +- PowerSync collection uses **eager loading only**: all data loaded in `beforeCreate` hook +- Syncs from local PowerSync SQLite → TanStack DB in-memory collection +- Uses diff triggers for continuous sync of changes +- No `loadSubset` or `unloadSubset` implementation + +### Target State +- Support `syncMode: 'eager'` (current behavior) and `syncMode: 'on-demand'` +- On-demand: collection marks ready immediately, loads data via `loadSubset` when queries need it +- Predicate push-down: convert TanStack expression trees to SQLite queries + +## Key Design Decisions + +### 1. SQLite vs PostgreSQL Compiler +Electric uses PostgreSQL. PowerSync uses SQLite. Key differences: +- Parameter placeholders: `?` instead of `$1, $2` +- IN operator: `IN (?, ?, ?)` instead of `= ANY($1)` +- No native ILIKE: use `LIKE` with `COLLATE NOCASE` +- Boolean storage: SQLite uses 0/1 integers (simpler than PG) + +### 2. Diff Trigger Behavior +Keep diff triggers firing for ALL SQLite changes regardless of sync mode. TanStack's subscription system filters changes based on query predicates. This ensures changes to loaded data propagate correctly. + +### 3. 
Deduplication Strategy +Use TanStack's built-in `DeduplicatedLoadSubset` class which: +- Tracks loaded predicates to avoid redundant queries +- Handles in-flight request deduplication +- Supports reset for must-refetch scenarios + +### 4. Unload Implementation +Start with no-op implementation. Data remains in SQLite; TanStack handles collection memory via subscription lifecycle/gcTime. + +--- + +## Implementation Steps + +### Step 1: Create SQLite Expression Compiler + +**New file**: `packages/powersync-db-collection/src/sqlite-compiler.ts` + +```typescript +import type { IR, LoadSubsetOptions } from '@tanstack/db' + +export interface SQLiteCompiledQuery { + where?: string + orderBy?: string + limit?: number + params: Array +} + +export function compileSQLite(options: LoadSubsetOptions): SQLiteCompiledQuery +``` + +**Operators to support**: +| TanStack Op | SQLite SQL | +|-------------|------------| +| eq | = | +| gt | > | +| gte | >= | +| lt | < | +| lte | <= | +| and | AND | +| or | OR | +| not | NOT | +| isNull/isUndefined | IS NULL | +| in | IN (?, ?, ...) | +| like | LIKE | +| ilike | LIKE (COLLATE NOCASE) | +| upper/lower | UPPER/LOWER | +| coalesce | COALESCE | + +### Step 2: Modify Sync Configuration + +**File**: `packages/powersync-db-collection/src/powersync.ts` + +#### Architecture Overview + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ PowerSync Adapter (powersync.ts) │ +│ │ +│ const sync: SyncConfig = { │ +│ sync: (params) => { │ +│ // 1. Define loadSubset inside sync function │ +│ const loadSubsetImpl = async (options) => { ... } │ +│ │ +│ // 2. Setup diff triggers, etc. │ +│ async function start() { ... } │ +│ start() │ +│ │ +│ // 3. Return SyncConfigRes with loadSubset │ +│ return { │ +│ cleanup: () => abortController.abort(), │ +│ loadSubset: syncMode === 'on-demand' ? loadSubsetImpl : undefined, +│ unloadSubset: syncMode === 'on-demand' ? 
() => {} : undefined, +│ } │ +│ } │ +│ } │ +└─────────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────────┐ +│ TanStack DB (CollectionSyncManager - sync.ts) │ +│ │ +│ startSync() { │ +│ const syncRes = this.config.sync.sync({ begin, write, ... }) │ +│ this.syncLoadSubsetFn = syncRes?.loadSubset // Stores it │ +│ │ +│ // Validates on-demand has loadSubset │ +│ if (syncMode === 'on-demand' && !this.syncLoadSubsetFn) { │ +│ throw new CollectionConfigurationError(...) │ +│ } │ +│ } │ +│ │ +│ loadSubset(options) { │ +│ if (syncMode === 'eager') return true // No-op for eager │ +│ return this.syncLoadSubsetFn(options) // Calls your function │ +│ } │ +└─────────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────────┐ +│ TanStack DB (CollectionSubscription - subscription.ts) │ +│ │ +│ requestSnapshot() { │ +│ // When query needs data, calls loadSubset │ +│ const syncResult = this.collection._sync.loadSubset({ │ +│ where: ..., │ +│ orderBy: ..., │ +│ limit: ..., │ +│ subscription: this, │ +│ }) │ +│ } │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +#### Changes Required + +1. Extract `syncMode` from config (with default `'eager'`) +2. Modify `beforeCreate` hook: + - Eager: existing behavior (load all data, then markReady) + - On-demand: skip data load, markReady immediately +3. Define `loadSubsetImpl` function inside sync +4. 
Return `SyncConfigRes` object instead of just cleanup function + +### Step 3: Implement loadSubset Function + +**Full sync function structure** (showing where loadSubset fits): + +```typescript +const sync: SyncConfig = { + sync: (params) => { + const { begin, write, commit, markReady } = params + const abortController = new AbortController() + + // ───────────────────────────────────────────────────────────── + // Define loadSubset INSIDE the sync function (has access to + // begin/write/commit and database from closure) + // ───────────────────────────────────────────────────────────── + const loadSubsetImpl = async (options: LoadSubsetOptions): Promise => { + const { where, orderBy, limit, params } = compileSQLite(options) + + // Build SELECT query + let sql = `SELECT * FROM ${viewName}` + if (where) sql += ` WHERE ${where}` + if (orderBy) sql += ` ORDER BY ${orderBy}` + if (limit) sql += ` LIMIT ?` + + const queryParams = limit ? [...params, limit] : params + + // Execute against PowerSync SQLite + const rows = await database.getAll(sql, queryParams) + + // Write to TanStack collection + begin() + for (const row of rows) { + write({ type: 'insert', value: deserializeSyncRow(row) }) + } + commit() + } + + // ───────────────────────────────────────────────────────────── + // Existing async setup (diff triggers, initial load for eager) + // ───────────────────────────────────────────────────────────── + async function start() { + // ... existing onChangeWithCallback setup ... + + const disposeTracking = await database.triggers.createDiffTrigger({ + // ... existing trigger config ... 
+ hooks: { + beforeCreate: async (context) => { + if (syncMode === 'on-demand') { + // On-demand: skip initial load, mark ready immediately + markReady() + return + } + + // Eager: existing batch loading logic + let currentBatchCount = syncBatchSize + let cursor = 0 + while (currentBatchCount == syncBatchSize) { + begin() + const batchItems = await context.getAll( + `SELECT * FROM ${viewName} LIMIT ? OFFSET ?`, + [syncBatchSize, cursor] + ) + // ... write each row ... + commit() + } + markReady() + }, + }, + }) + } + + start().catch((error) => /* ... */) + + // ───────────────────────────────────────────────────────────── + // Return SyncConfigRes (replaces the old `return () => { ... }`) + // ───────────────────────────────────────────────────────────── + return { + cleanup: () => { + database.logger.info(`Sync stopped for ${viewName}`) + abortController.abort() + }, + loadSubset: syncMode === 'on-demand' ? loadSubsetImpl : undefined, + unloadSubset: syncMode === 'on-demand' ? () => {} : undefined, + } + }, + getSyncMetadata: undefined, +} +``` + +### Step 4: Add Deduplication Wrapper + +```typescript +import { DeduplicatedLoadSubset } from '@tanstack/db' + +// Wrap loadSubset with deduplication +const dedupe = new DeduplicatedLoadSubset({ + loadSubset: loadSubsetImpl, + getKey: (row) => row.id +}) + +return { + cleanup: () => abortController.abort(), + loadSubset: syncMode === 'on-demand' ? dedupe.loadSubset : undefined, + unloadSubset: syncMode === 'on-demand' ? () => {} : undefined, // no-op for now +} +``` + +### Step 5: Export New Utilities + +**File**: `packages/powersync-db-collection/src/index.ts` + +Export the SQLite compiler for advanced users who want to customize query generation. 
+ +--- + +## Files to Modify/Create + +| File | Action | Description | +|------|--------|-------------| +| `src/sqlite-compiler.ts` | Create | SQLite expression-to-SQL compiler | +| `src/powersync.ts` | Modify | Add syncMode handling, loadSubset impl | +| `src/definitions.ts` | Modify | Add any new config types if needed | +| `src/index.ts` | Modify | Export new utilities | +| `tests/sqlite-compiler.test.ts` | Create | Unit tests for SQL compiler | +| `tests/on-demand-sync.test.ts` | Create | Integration tests for on-demand mode | + +--- + +## Test Plan + +### Unit Tests: SQLite Compiler +- Each operator compiles correctly +- Nested expressions (AND/OR combinations) +- Parameter placeholder ordering +- Identifier quoting for column names +- Null value handling (error for comparison ops) +- OrderBy with direction and nulls handling +- Limit compilation + +### Integration Tests: On-Demand Sync +- Collection marks ready immediately without data +- loadSubset executes correct SQL +- Data appears in collection after loadSubset +- Duplicate rows handled (insert → update) +- Diff triggers still propagate changes +- Overlapping loadSubset calls are deduplicated +- Live queries trigger loadSubset correctly + +### E2E Test Scenario +```typescript +// 1. Create on-demand collection +const collection = createCollection(powerSyncCollectionOptions({ + database: db, + table: schema.products, + syncMode: 'on-demand', +})) + +// 2. Verify empty and ready +await collection.stateWhenReady() +expect(collection.size).toBe(0) + +// 3. Query triggers loadSubset +const query = useLiveQuery({ + query: (q) => q.from({ p: collection }) + .where(({ p }) => gt(p.price, 100)) + .select(({ p }) => p), +}) + +// 4. Verify data loaded +expect(query.data.length).toBeGreaterThan(0) +``` + +--- + +## Verification Steps + +1. Run existing tests to ensure eager mode still works +2. Run new SQLite compiler unit tests +3. Run on-demand integration tests +4. 
Manual testing with a React app using `useLiveQuery` + +--- + +## Design Decisions (Resolved) + +1. **Cursor-based pagination**: Start with basic `where`/`orderBy`/`limit`. Design the compiler interface to allow cursor support later without breaking changes. + +2. **Column name mapping**: Not needed. PowerSync schemas use snake_case directly in both JS and SQLite (e.g., `list_id`, `created_at`). No transformation required. + +3. **Error handling**: Log errors and continue. Keep collection in ready state so queries return empty/partial results rather than blocking the app. + +--- + +## References + +- TanStack DB types: `packages/db/src/types.ts` (LoadSubsetOptions, SyncConfigRes) +- Electric reference: `packages/electric-db-collection/src/sql-compiler.ts` +- Expression helpers: `packages/db/src/query/expression-helpers.ts` +- TanStack DB 0.5 blog: https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync diff --git a/packages/powersync-db-collection/research/on-demand-sync-research.md b/packages/powersync-db-collection/research/on-demand-sync-research.md new file mode 100644 index 000000000..c168be51b --- /dev/null +++ b/packages/powersync-db-collection/research/on-demand-sync-research.md @@ -0,0 +1,329 @@ +# Research: On-Demand Sync for PowerSync + TanStack DB + +## Executive Summary + +This document captures research findings for implementing `syncMode: 'on-demand'` in the powersync-db-collection package. The feature enables query-driven data loading from the local PowerSync SQLite database into TanStack DB collections. + +--- + +## TanStack DB Query-Driven Sync (v0.5+) + +### Overview +TanStack DB v0.5 introduced "Query-Driven Sync" - a paradigm where components declare data needs through queries, which automatically translate into optimized data fetching. + +**Key concept**: "Your component's query becomes the API call." 
+ +### Sync Modes + +| Mode | Behavior | Best For | +|------|----------|----------| +| `eager` | Load all data upfront, then mark ready | Small datasets (<10k rows), reference data | +| `on-demand` | Load data incrementally when queried | Large datasets, search interfaces, catalogs | + +### Core Interfaces + +```typescript +// packages/db/src/types.ts + +type SyncMode = `eager` | `on-demand` + +type LoadSubsetOptions = { + where?: BasicExpression // Filter predicate as expression tree + orderBy?: OrderBy // Sort specification + limit?: number // Row limit + cursor?: CursorExpressions // For cursor-based pagination + offset?: number // For offset-based pagination + subscription?: Subscription // The subscription requesting data +} + +type LoadSubsetFn = (options: LoadSubsetOptions) => true | Promise +type UnloadSubsetFn = (options: LoadSubsetOptions) => void + +type SyncConfigRes = { + cleanup?: CleanupFn + loadSubset?: LoadSubsetFn // Called when query needs data + unloadSubset?: UnloadSubsetFn // Called when data no longer needed +} +``` + +### Expression Tree Structure + +TanStack DB represents query predicates as expression trees: + +```typescript +type BasicExpression = + | { type: 'val', value: T } // Literal value + | { type: 'ref', path: string[] } // Column reference + | { type: 'func', name: string, args: BasicExpression[] } // Operator/function + +// Example: where price > 100 +{ + type: 'func', + name: 'gt', + args: [ + { type: 'ref', path: ['price'] }, + { type: 'val', value: 100 } + ] +} +``` + +### Expression Helpers + +TanStack DB provides utilities for parsing expression trees: + +```typescript +import { + parseWhereExpression, // Convert where to custom format + parseOrderByExpression, // Extract sort specs + extractSimpleComparisons, // Flatten AND-ed comparisons + parseLoadSubsetOptions, // Convenience wrapper + walkExpression, // Generic tree traversal + extractFieldPath, // Get column path from ref + extractValue // Get value from val +} from 
'@tanstack/db'
```

---

## Electric-db-collection Reference Implementation

### Architecture

Electric's implementation provides a complete on-demand sync solution for PostgreSQL. Key components:

1. **sql-compiler.ts** - Converts expression trees to PostgreSQL SQL
2. **Deduplication** - Prevents redundant subset loads
3. **Progressive mode** - Hybrid eager+on-demand for fast initial load

### SQL Compiler Patterns

```typescript
// packages/electric-db-collection/src/sql-compiler.ts

function compileSQL(options: LoadSubsetOptions): SubsetParams {
  // 1. Compile where expression to SQL string + params
  // 2. Compile orderBy to SQL string
  // 3. Return { where, orderBy, limit, params }
}

// Expression compilation is recursive:
// - val → parameter placeholder ($1, $2, ...)
// - ref → quoted identifier ("columnName")
// - func → operator/function call
```

### Operator Mappings

| TanStack | PostgreSQL | Notes |
|----------|------------|-------|
| eq | = | |
| gt/gte/lt/lte | > / >= / < / <= | |
| and/or | AND / OR | N-ary (>2 args) |
| not | NOT | |
| isNull/isUndefined | IS NULL | |
| in | = ANY($1) | Array parameter |
| like/ilike | LIKE/ILIKE | |
| upper/lower | UPPER/LOWER | |

### Deduplication Strategy

Electric tracks loaded predicates to avoid redundant queries:

```typescript
// Tracks what's been loaded
const syncedKeys = new Set()

// On loadSubset:
// 1. Check if predicate already covered
// 2. Execute query if not covered
// 3. Track newly loaded keys
// 4. Convert inserts to updates for overlapping rows
```

---

## Current PowerSync Implementation

### File: `packages/powersync-db-collection/src/powersync.ts`

**Current behavior (eager only):**

1. `beforeCreate` hook loads ALL data:
   ```typescript
   const batchItems = await context.getAll(
     `SELECT * FROM ${viewName} LIMIT ? OFFSET ?`,
     [syncBatchSize, cursor]
   )
   ```

2.
Diff triggers monitor SQLite changes: + ```typescript + database.triggers.createDiffTrigger({ + source: viewName, + destination: trackedTableName, + // Fires for INSERT, UPDATE, DELETE + }) + ``` + +3. Changes propagate to TanStack collection: + ```typescript + write({ + type: mapOperation(operation), // 'insert' | 'update' | 'delete' + value: deserializeSyncRow(row), + }) + ``` + +### Missing for On-Demand + +- No `syncMode` handling (always eager) +- No `loadSubset` implementation +- No SQLite expression compiler +- Returns only `cleanup`, not full `SyncConfigRes` + +--- + +## SQLite vs PostgreSQL Differences + +### Parameter Placeholders +- PostgreSQL: `$1, $2, $3` +- SQLite: `?, ?, ?` + +### Array Membership (IN operator) +- PostgreSQL: `column = ANY($1)` with array parameter +- SQLite: `column IN (?, ?, ?)` with individual parameters + +### Case-Insensitive LIKE +- PostgreSQL: `ILIKE` operator +- SQLite: `LIKE` with `COLLATE NOCASE` + +### Boolean Handling +- PostgreSQL: Native boolean type, needs special handling for < > comparisons +- SQLite: 0/1 integers, simpler comparison handling + +### Identifier Quoting +- PostgreSQL: `"identifier"` +- SQLite: `"identifier"` or `` `identifier` `` + +--- + +## Data Flow: On-Demand Sync + +``` +┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐ +│ React Query │────▶│ TanStack DB │────▶│ PowerSync │ +│ useLiveQuery │ │ Collection │ │ SQLite DB │ +└─────────────────┘ └──────────────────┘ └─────────────────┘ + │ │ │ + │ 1. Query declared │ │ + │ ─────────────────────▶│ │ + │ │ 2. loadSubset called │ + │ │ ──────────────────────▶│ + │ │ │ 3. SQL query + │ │ │ ─────────── + │ │ 4. Rows returned │ + │ │ ◀──────────────────────│ + │ 5. Data in collection │ │ + │ ◀─────────────────────│ │ + │ │ │ + │ 6. Live updates │ 7. Diff trigger fires │ + │ ◀─────────────────────│ ◀──────────────────────│ + │ │ │ +``` + +--- + +## TanStack DB Internal Architecture + +### Where loadSubset Lives + +**1. 
PowerSync provides it** (in sync function return): +```typescript +// powersync.ts +const sync: SyncConfig = { + sync: (params) => { + const loadSubsetImpl = async (options: LoadSubsetOptions) => { + // Query SQLite, write to collection + } + + return { + cleanup: () => { ... }, + loadSubset: syncMode === 'on-demand' ? loadSubsetImpl : undefined, + } + } +} +``` + +**2. TanStack stores it** (CollectionSyncManager.startSync): +```typescript +// packages/db/src/collection/sync.ts line 226 +this.syncLoadSubsetFn = syncRes?.loadSubset ?? null + +// Validation: on-demand REQUIRES loadSubset (lines 232-237) +if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) { + throw new CollectionConfigurationError(...) +} +``` + +**3. Subscriptions call it** (CollectionSubscription): +```typescript +// packages/db/src/collection/subscription.ts + +requestSnapshot() { + // line 366 + const syncResult = this.collection._sync.loadSubset({ + where: ..., + orderBy: ..., + limit: ..., + subscription: this, + }) +} + +requestLimitedSnapshot() { + // line 595 - for paginated queries + const syncResult = this.collection._sync.loadSubset({ + where, limit, orderBy, cursor, offset, subscription + }) +} +``` + +### Key Files in TanStack DB + +| File | Purpose | +|------|---------| +| `collection/sync.ts` | Stores loadSubset, validates on-demand config, exposes `loadSubset()` method | +| `collection/subscription.ts` | Calls loadSubset when queries need data, tracks loaded subsets for unload | +| `types.ts` | Defines `LoadSubsetOptions`, `SyncConfigRes`, `SyncMode` | +| `query/subset-dedupe.ts` | `DeduplicatedLoadSubset` class for preventing redundant loads | + +--- + +## Key Considerations + +### 1. Diff Triggers in On-Demand Mode +- Should continue firing for ALL changes +- TanStack filters by subscription predicates +- Ensures changes to loaded data propagate correctly + +### 2. 
Memory Management +- `unloadSubset` can be no-op initially +- Data stays in SQLite (source of truth) +- TanStack handles collection memory via gcTime/subscription lifecycle + +### 3. Deduplication +- Use TanStack's `DeduplicatedLoadSubset` class +- Handles overlapping queries efficiently +- Converts redundant inserts to updates + +### 4. Error Handling +- Log errors, don't block the app +- Keep collection in ready state +- Return empty/partial results on failure + +--- + +## Sources + +- [TanStack DB 0.5: Query-Driven Sync](https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync) +- [TanStack DB Query Collection Docs](https://tanstack.com/db/latest/docs/collections/query-collection) +- [RFC: On-Demand Collection Loading via loadSubset](https://github.com/TanStack/db/discussions/676) +- [Electric + TanStack DB Integration](https://electric-sql.com/blog/2025/07/29/local-first-sync-with-tanstack-db) diff --git a/packages/powersync-db-collection/research/writeup.md b/packages/powersync-db-collection/research/writeup.md new file mode 100644 index 000000000..3586f3c38 --- /dev/null +++ b/packages/powersync-db-collection/research/writeup.md @@ -0,0 +1,102 @@ +# Query Driven Sync for PowerSync + TanstackDB + +## Background + +TanstackDB supports a collection option called "sync mode" which defines which strategy to use when syncing data to the TanstackDB collection. + +Note in the context of PowerSync and TanstackDB, there are two distinct definitions for "sync". For PowerSync, sync means syncing between the local PowerSync SQLite database and the remote sync service/backend. For TanstackDB, sync means syncing data from your local data source (in this case PowerSync's SQLite database) and the in-memory TanstackDB collection. + +TanstackDB describe its sync mode options as: +-------------------- +Eager mode (default): Loads entire collection upfront. Best for <10k rows of mostly static data like user preferences or small reference tables. 
+- On-demand mode (aka query driven sync): Loads only what queries request. Best for large datasets (>50k rows), search interfaces, and catalogs where most data won't be accessed. +- Progressive mode: Loads query subset immediately, syncs full dataset in background. Best for collaborative apps needing instant first paint AND sub-millisecond queries. +-------------------- + +The initial PowerSync-TanstackDB integration supports "Eager mode", effectively mirroring all the data from a PowerSync SQLite table to the collection. +We are interested in adding support for the on-demand/query driven mode. + +## Supporting On-Demand (Query Driven) Sync + +Query-driven sync works by evaluating queries against the given TanstackDB collection. The collection automatically pushes down query predicates (where clauses, orderBy, limit, and offset) to the collection's `queryFn`. This allows you to fetch only the data needed for each specific query, rather than fetching the entire dataset. + +This option could be opted in by passing the `syncMode: 'on-demand'` option to `powerSyncCollectionOptions`. We wouldn't expose the `queryFn` on this level as we should be able to resolve queryFn logic internally. 
+ +An example pulled from their docs: + +``` +const productsCollection = createCollection( + queryCollectionOptions({ + syncMode: 'on-demand', // Enable predicate push-down + + queryFn: async (ctx) => { + const { limit, offset, where, orderBy } = ctx.meta.loadSubsetOptions + + // Build query parameters from parsed filters + const params = new URLSearchParams() + + // Add filters + const parsed = parseLoadSubsetOptions({ where, orderBy, limit }) + + // Add filters + parsed.filters.forEach(({ field, operator, value }) => { + const fieldName = field.join('.') + if (operator === 'eq') { + params.set(fieldName, String(value)) + } else if (operator === 'lt') { + params.set(`${fieldName}_lt`, String(value)) + } else if (operator === 'gt') { + params.set(`${fieldName}_gt`, String(value)) + } + }) + // Add sorting + // Add limit + // Add offset for pagination + + // or some local query against a sqlite db + const response = await fetch(`/api/products?${params}`) + return response.json() + }, + }) + ``` + +> Add PoC of mechanism/implementation/understanding/draw the rest of the owl + +## Ramblings +### Using triggers +Remember that db triggers don't work on views, and if you need to work against underlying tables json_extract will be needed - relevant when processing the subset loading. + +### Query cursor support +The electric-db-collection package supports cursor-based pagination which I believe ties in with query driven-sync. We should be able to support this by passing the cursor expressions to the `queryFn` and let the `queryFn` handle the pagination. + +### Live queries +Need to investigate how live queries work with query-driven sync. Do we need to do any additional work on top of the existing triggers? + +### Error handling +Need to investigate how error handling works with query-driven sync. Do we need to do any additional work on top of the existing error handling? 
+ +## Optimising remote sync +Similar thoughts/suggestions were raised by Simon, Steven, and Kobie - for the second part of this work. +Query-driven sync optimizes the data loaded from your local SQLite database to the collection, but that doesn't change how much data is loaded into the database. A possible optimization that could be useful for both `eager` and `on-demand` modes is tying in sync streams/sync stream parameters to limit the amount of data loaded to the database. + + +Paraphrasing Simon: +There might be a way to subscribe in PowerSync to sync streams with parameters depending on TanStackDB filters. Say I have a stream defined as `SELECT * FROM notes WHERE owner = request.user_id() AND project_id = subscription.parameter('project')`. It would be really cool if: +``` +const { data: todos } = useLiveQuery((q) => + q.from({ note: notesCollection }).where(({ note }) => note.project_id = 'my_project_id') +) +``` +could somehow realize that the filter correlates with a subscription parameter and auto-subscribe with `db.syncStream('name', {project_id: 'my_project_id'})`. + +And if you had two of the `useLiveQuery()` hooks like above filtering for different project ids, sync streams just let you do two subscriptions. + +Initial unknown is how you map collections -> tables -> sync stream names. Which will be quite complicated. The service would have to generate that information by looking at all sync streams to generate this pattern. + +Doing that in `queryFn` by analyzing `where` seems like a clean way to go about this though. Especially since we can await `waitForFirstSync()` on the stream subscription we're creating to show a loading state until the subset of data requested has actually been synced, that should nicely map to what TSDB expects. + +So if we listed all the possible strategies (ignoring the progressive mode): +1. On-demand with sync streams incorporated +2. Eager with sync streams incorporated +3. On-demand +4. 
Eager \ No newline at end of file diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 6879d7a22..f8d092805 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1,3 +1,4 @@ export * from './definitions' export * from './powersync' export * from './PowerSyncTransactor' +export * from './sqlite-compiler' diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index bc2e85bc2..c83fbaee0 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -5,6 +5,7 @@ import { DEFAULT_BATCH_SIZE } from './definitions' import { asPowerSyncRecord, mapOperation } from './helpers' import { convertTableToSchema } from './schema' import { serializeForSQLite } from './serialization' +import { compileSQLite } from './sqlite-compiler' import type { AnyTableColumnType, ExtractedTable, @@ -24,7 +25,7 @@ import type { PowerSyncCollectionUtils, } from './definitions' import type { PendingOperation } from './PendingOperationStore' -import type { SyncConfig } from '@tanstack/db' +import type { LoadSubsetOptions, SyncConfig } from '@tanstack/db' import type { StandardSchemaV1 } from '@standard-schema/spec' import type { Table, TriggerDiffRecord } from '@powersync/common' @@ -225,6 +226,7 @@ export function powerSyncCollectionOptions< table, schema: inputSchema, syncBatchSize = DEFAULT_BATCH_SIZE, + syncMode = `eager`, ...restConfig } = config @@ -299,6 +301,63 @@ export function powerSyncCollectionOptions< const { begin, write, commit, markReady } = params const abortController = new AbortController() + /** + * Loads a subset of data from SQLite based on the provided options. + * Called by TanStack DB when a query needs data in on-demand mode. 
+ */ + const loadSubsetImpl = async ( + options: LoadSubsetOptions, + ): Promise => { + try { + const compiled = compileSQLite(options) + + // Build the SELECT query + let sql = `SELECT * FROM ${viewName}` + const queryParams: Array = [...compiled.params] + + if (compiled.where) { + sql += ` WHERE ${compiled.where}` + } + + if (compiled.orderBy) { + sql += ` ORDER BY ${compiled.orderBy}` + } + + if (compiled.limit !== undefined) { + sql += ` LIMIT ?` + queryParams.push(compiled.limit) + } + + database.logger.debug?.( + `loadSubset for ${viewName}: ${sql}`, + queryParams, + ) + + // Execute query against PowerSync SQLite + const rows = await database.getAll(sql, queryParams) + + // Write rows to TanStack collection + begin() + for (const row of rows) { + write({ + type: `insert`, + value: deserializeSyncRow(row), + }) + } + commit() + + database.logger.debug?.( + `loadSubset loaded ${rows.length} rows for ${viewName}`, + ) + } catch (error) { + // Log error but don't throw - keeps collection in ready state + database.logger.error( + `loadSubset failed for ${viewName}`, + error, + ) + } + } + // The sync function needs to be synchronous async function start() { database.logger.info( @@ -372,6 +431,15 @@ export function powerSyncCollectionOptions< }, hooks: { beforeCreate: async (context) => { + if (syncMode === 'on-demand') { + // On-demand: skip initial load, mark ready immediately + markReady() + database.logger.info( + `Sync is ready for ${viewName} into ${trackedTableName} in on-demand mode`, + ) + return + } + let currentBatchCount = syncBatchSize let cursor = 0 while (currentBatchCount == syncBatchSize) { @@ -419,12 +487,18 @@ export function powerSyncCollectionOptions< ), ) - return () => { - database.logger.info( - `Sync has been stopped for ${viewName} into ${trackedTableName}`, - ) - abortController.abort() - } + return { + cleanup: () => { + database.logger.info( + `Sync has been stopped for ${viewName} into ${trackedTableName}`, + ) + 
abortController.abort() + }, + loadSubset: + syncMode === `on-demand` ? loadSubsetImpl : undefined, + unloadSubset: + syncMode === `on-demand` ? () => {} : undefined, + } }, // Expose the getSyncMetadata function getSyncMetadata: undefined, @@ -442,6 +516,7 @@ export function powerSyncCollectionOptions< getKey, // Syncing should start immediately since we need to monitor the changes for mutations startSync: true, + syncMode, sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation diff --git a/packages/powersync-db-collection/src/sqlite-compiler.ts b/packages/powersync-db-collection/src/sqlite-compiler.ts new file mode 100644 index 000000000..2e2e9de14 --- /dev/null +++ b/packages/powersync-db-collection/src/sqlite-compiler.ts @@ -0,0 +1,273 @@ +import type { IR, LoadSubsetOptions } from '@tanstack/db' + +/** + * Result of compiling LoadSubsetOptions to SQLite + */ +export interface SQLiteCompiledQuery { + /** The WHERE clause (without "WHERE" keyword), e.g., "price > ?" */ + where?: string + /** The ORDER BY clause (without "ORDER BY" keyword), e.g., "price DESC" */ + orderBy?: string + /** The LIMIT value */ + limit?: number + /** Parameter values in order, to be passed to SQLite query */ + params: Array +} + +/** + * Compiles TanStack DB LoadSubsetOptions to SQLite query components. 
+ * + * @example + * ```typescript + * const compiled = compileSQLite({ + * where: { type: 'func', name: 'gt', args: [ + * { type: 'ref', path: ['price'] }, + * { type: 'val', value: 100 } + * ]}, + * orderBy: [{ expression: { type: 'ref', path: ['price'] }, compareOptions: { direction: 'desc', nulls: 'last' } }], + * limit: 50 + * }) + * // Result: { where: '"price" > ?', orderBy: '"price" DESC', limit: 50, params: [100] } + * ``` + */ +export function compileSQLite(options: LoadSubsetOptions): SQLiteCompiledQuery { + const { where, orderBy, limit } = options + + const params: Array = [] + const result: SQLiteCompiledQuery = { params } + + if (where) { + result.where = compileExpression(where, params) + } + + if (orderBy) { + result.orderBy = compileOrderBy(orderBy, params) + } + + if (limit !== undefined) { + result.limit = limit + } + + return result +} + +/** + * Quote SQLite identifiers to handle column names correctly. + * SQLite uses double quotes for identifiers. + */ +function quoteIdentifier(name: string): string { + // Escape any double quotes in the name by doubling them + const escaped = name.replace(/"/g, `""`) + return `"${escaped}"` +} + +/** + * Compiles a BasicExpression to a SQL string, mutating the params array. + */ +function compileExpression( + exp: IR.BasicExpression, + params: Array, +): string { + switch (exp.type) { + case `val`: + params.push(exp.value) + return `?` + case `ref`: + if (exp.path.length !== 1) { + throw new Error( + `SQLite compiler doesn't support nested properties: ${exp.path.join(`.`)}`, + ) + } + return quoteIdentifier(exp.path[0]!) + case `func`: + return compileFunction(exp, params) + default: + throw new Error(`Unknown expression type: ${(exp as any).type}`) + } +} + +/** + * Compiles an OrderBy array to a SQL ORDER BY clause. 
+ */ +function compileOrderBy( + orderBy: IR.OrderBy, + params: Array, +): string { + const clauses = orderBy.map((clause: IR.OrderByClause) => + compileOrderByClause(clause, params), + ) + return clauses.join(`, `) +} + +/** + * Compiles a single OrderByClause to SQL. + */ +function compileOrderByClause( + clause: IR.OrderByClause, + params: Array, +): string { + const { expression, compareOptions } = clause + let sql = compileExpression(expression, params) + + if (compareOptions.direction === `desc`) { + sql = `${sql} DESC` + } + + // SQLite supports NULLS FIRST/LAST (since 3.30.0) + if (compareOptions.nulls === `first`) { + sql = `${sql} NULLS FIRST` + } else { + // Default to NULLS LAST (nulls === 'last') + sql = `${sql} NULLS LAST` + } + + return sql +} + +/** + * Check if a BasicExpression represents a null/undefined value + */ +function isNullValue(exp: IR.BasicExpression): boolean { + return exp.type === `val` && (exp.value === null || exp.value === undefined) +} + +/** + * Compiles a function expression (operator) to SQL. + */ +function compileFunction( + exp: IR.Func, + params: Array, +): string { + const { name, args } = exp + + // Check for null values in comparison operators + if (isComparisonOp(name)) { + const hasNullArg = args.some((arg: IR.BasicExpression) => isNullValue(arg)) + if (hasNullArg) { + throw new Error( + `Cannot use null/undefined with '${name}' operator. 
` + + `Use isNull() to check for null values.`, + ) + } + } + + // Compile arguments + const compiledArgs = args.map((arg: IR.BasicExpression) => + compileExpression(arg, params), + ) + + // Handle different operator types + switch (name) { + // Binary comparison operators + case `eq`: + case `gt`: + case `gte`: + case `lt`: + case `lte`: { + if (compiledArgs.length !== 2) { + throw new Error(`${name} expects 2 arguments`) + } + const opSymbol = getComparisonOp(name) + return `${compiledArgs[0]} ${opSymbol} ${compiledArgs[1]}` + } + + // Logical operators + case `and`: + case `or`: { + if (compiledArgs.length < 2) { + throw new Error(`${name} expects at least 2 arguments`) + } + const opKeyword = name === `and` ? `AND` : `OR` + return compiledArgs.map((arg: string) => `(${arg})`).join(` ${opKeyword} `) + } + + case `not`: { + if (compiledArgs.length !== 1) { + throw new Error(`not expects 1 argument`) + } + // Check if argument is isNull/isUndefined for IS NOT NULL + const arg = args[0] + if (arg && arg.type === `func`) { + if (arg.name === `isNull` || arg.name === `isUndefined`) { + const innerArg = compileExpression(arg.args[0]!, params) + return `${innerArg} IS NOT NULL` + } + } + return `NOT (${compiledArgs[0]})` + } + + // Null checking + case `isNull`: + case `isUndefined`: { + if (compiledArgs.length !== 1) { + throw new Error(`${name} expects 1 argument`) + } + return `${compiledArgs[0]} IS NULL` + } + + // IN operator + case `in`: { + if (compiledArgs.length !== 2) { + throw new Error(`in expects 2 arguments (column and array)`) + } + // The second argument should be an array value + // We need to handle this specially - expand the array into multiple placeholders + const lastParamIndex = params.length - 1 + const arrayValue = params[lastParamIndex] + + if (!Array.isArray(arrayValue)) { + throw new Error(`in operator requires an array value`) + } + + // Remove the array param and add individual values + params.pop() + const placeholders = arrayValue.map(() 
=> { + params.push(arrayValue[params.length - lastParamIndex]) + return `?` + }) + + // Re-add individual values properly + params.length = lastParamIndex // Reset to before array + for (const val of arrayValue) { + params.push(val) + } + + return `${compiledArgs[0]} IN (${placeholders.join(`, `)})` + } + + // String operators + case `like`: { + if (compiledArgs.length !== 2) { + throw new Error(`like expects 2 arguments`) + } + return `${compiledArgs[0]} LIKE ${compiledArgs[1]}` + } + + default: + throw new Error( + `Operator '${name}' is not supported in PowerSync on-demand sync. ` + + `Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like`, + ) + } +} + +/** + * Check if operator is a comparison operator + */ +function isComparisonOp(name: string): boolean { + return [`eq`, `gt`, `gte`, `lt`, `lte`, `like`].includes(name) +} + +/** + * Get the SQL symbol for a comparison operator + */ +function getComparisonOp(name: string): string { + const ops: Record = { + eq: `=`, + gt: `>`, + gte: `>=`, + lt: `<`, + lte: `<=`, + } + return ops[name]! 
+} diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts new file mode 100644 index 000000000..d9bf4a1b2 --- /dev/null +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -0,0 +1,139 @@ +import { randomUUID } from 'node:crypto' +import { tmpdir } from 'node:os' +import { PowerSyncDatabase, Schema, Table, column } from '@powersync/node' +import { + createCollection, + eq, + gt, + liveQueryCollectionOptions, +} from '@tanstack/db' +import { describe, expect, it, onTestFinished, vi } from 'vitest' +import { powerSyncCollectionOptions } from '../src' + +const APP_SCHEMA = new Schema({ + products: new Table({ + name: column.text, + price: column.integer, + category: column.text, + }), +}) + +describe(`On-Demand Sync Mode`, () => { + async function createDatabase() { + const db = new PowerSyncDatabase({ + database: { + dbFilename: `test-on-demand-${randomUUID()}.sqlite`, + dbLocation: tmpdir(), + implementation: { type: `node:sqlite` }, + }, + schema: APP_SCHEMA, + }) + onTestFinished(async () => { + await db.disconnectAndClear() + await db.close() + }) + await db.disconnectAndClear() + return db + } + + async function createTestProducts(db: PowerSyncDatabase) { + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES + (uuid(), 'Product A', 50, 'electronics'), + (uuid(), 'Product B', 150, 'electronics'), + (uuid(), 'Product C', 25, 'clothing'), + (uuid(), 'Product D', 200, 'electronics'), + (uuid(), 'Product E', 75, 'clothing') + `) + } + + it(`should not load any data initially in on-demand mode`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + // Verify data exists in SQLite + const sqliteCount = await db.get<{ count: number }>( + `SELECT COUNT(*) as count FROM products`, + ) + expect(sqliteCount.count).toBe(5) + + // Create collection with on-demand sync mode + const collection = createCollection( + 
powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + // Wait for collection to be ready + await collection.stateWhenReady() + + // Verify NO data was loaded into the collection + expect(collection.size).toBe(0) + }) + + it(`should load only matching data when live query is created`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + // Create collection with on-demand sync mode + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // Verify collection is empty initially + expect(collection.size).toBe(0) + + // Create a live query that filters for electronics over $100 + const expensiveElectronics = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => + eq(product.category, `electronics`), + ) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveElectronics.cleanup()) + + // Preload triggers the live query to request data via loadSubset + await expensiveElectronics.preload() + + // Wait for loadSubset to complete and data to appear + await vi.waitFor( + () => { + // The live query should have triggered loadSubset + // Only electronics with price > 100 should match: Product B (150), Product D (200) + expect(expensiveElectronics.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // Verify the correct products were loaded + const loadedProducts = expensiveElectronics.toArray + const names = loadedProducts.map((p) => p.name).sort() + expect(names).toEqual([`Product B`, `Product D`]) + + // Verify prices 
are correct + const prices = loadedProducts.map((p) => p.price).sort((a, b) => a! - b!) + expect(prices).toEqual([150, 200]) + }) +}) diff --git a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts new file mode 100644 index 000000000..245e3101b --- /dev/null +++ b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts @@ -0,0 +1,255 @@ +import { describe, expect, it } from 'vitest' +import { compileSQLite } from '../src/sqlite-compiler' +import type { IR } from '@tanstack/db' + +// Helper to create expression nodes +const val = (value: T): IR.BasicExpression => ({ type: `val`, value }) +const ref = (path: Array): IR.BasicExpression => ({ + type: `ref`, + path, +}) +const func = ( + name: string, + args: Array>, +): IR.BasicExpression => ({ + type: `func`, + name, + args, +}) + +describe(`SQLite Compiler`, () => { + describe(`where clause compilation`, () => { + it(`should compile eq operator`, () => { + const result = compileSQLite({ + where: func(`eq`, [ref([`name`]), val(`test`)]), + }) + + expect(result.where).toBe(`"name" = ?`) + expect(result.params).toEqual([`test`]) + }) + + it(`should compile gt operator`, () => { + const result = compileSQLite({ + where: func(`gt`, [ref([`price`]), val(100)]), + }) + + expect(result.where).toBe(`"price" > ?`) + expect(result.params).toEqual([100]) + }) + + it(`should compile gte operator`, () => { + const result = compileSQLite({ + where: func(`gte`, [ref([`price`]), val(100)]), + }) + + expect(result.where).toBe(`"price" >= ?`) + expect(result.params).toEqual([100]) + }) + + it(`should compile lt operator`, () => { + const result = compileSQLite({ + where: func(`lt`, [ref([`price`]), val(100)]), + }) + + expect(result.where).toBe(`"price" < ?`) + expect(result.params).toEqual([100]) + }) + + it(`should compile lte operator`, () => { + const result = compileSQLite({ + where: func(`lte`, [ref([`price`]), val(100)]), + }) + + 
expect(result.where).toBe(`"price" <= ?`) + expect(result.params).toEqual([100]) + }) + + it(`should compile and operator with two conditions`, () => { + const result = compileSQLite({ + where: func(`and`, [ + func(`gt`, [ref([`price`]), val(50)]), + func(`lt`, [ref([`price`]), val(100)]), + ]), + }) + + expect(result.where).toBe(`("price" > ?) AND ("price" < ?)`) + expect(result.params).toEqual([50, 100]) + }) + + it(`should compile and operator with multiple conditions`, () => { + const result = compileSQLite({ + where: func(`and`, [ + func(`eq`, [ref([`status`]), val(`active`)]), + func(`gt`, [ref([`price`]), val(50)]), + func(`lt`, [ref([`price`]), val(100)]), + ]), + }) + + expect(result.where).toBe( + `("status" = ?) AND ("price" > ?) AND ("price" < ?)`, + ) + expect(result.params).toEqual([`active`, 50, 100]) + }) + + it(`should compile or operator`, () => { + const result = compileSQLite({ + where: func(`or`, [ + func(`eq`, [ref([`status`]), val(`active`)]), + func(`eq`, [ref([`status`]), val(`pending`)]), + ]), + }) + + expect(result.where).toBe(`("status" = ?) 
OR ("status" = ?)`) + expect(result.params).toEqual([`active`, `pending`]) + }) + + it(`should compile isNull operator`, () => { + const result = compileSQLite({ + where: func(`isNull`, [ref([`deleted_at`])]), + }) + + expect(result.where).toBe(`"deleted_at" IS NULL`) + expect(result.params).toEqual([]) + }) + + it(`should compile not(isNull) as IS NOT NULL`, () => { + const result = compileSQLite({ + where: func(`not`, [func(`isNull`, [ref([`deleted_at`])])]), + }) + + expect(result.where).toBe(`"deleted_at" IS NOT NULL`) + expect(result.params).toEqual([]) + }) + + it(`should compile like operator`, () => { + const result = compileSQLite({ + where: func(`like`, [ref([`name`]), val(`%test%`)]), + }) + + expect(result.where).toBe(`"name" LIKE ?`) + expect(result.params).toEqual([`%test%`]) + }) + + it(`should escape quotes in column names`, () => { + const result = compileSQLite({ + where: func(`eq`, [ref([`col"name`]), val(`test`)]), + }) + + expect(result.where).toBe(`"col""name" = ?`) + }) + + it(`should throw error for null values in comparison operators`, () => { + expect(() => + compileSQLite({ + where: func(`eq`, [ref([`name`]), val(null)]), + }), + ).toThrow(`Cannot use null/undefined with 'eq' operator`) + }) + + it(`should throw error for unsupported operators`, () => { + expect(() => + compileSQLite({ + where: func(`ilike`, [ref([`name`]), val(`%test%`)]), + }), + ).toThrow(`Operator 'ilike' is not supported`) + }) + }) + + describe(`orderBy compilation`, () => { + it(`should compile simple orderBy`, () => { + const result = compileSQLite({ + orderBy: [ + { + expression: ref([`price`]), + compareOptions: { direction: `asc`, nulls: `last` }, + }, + ], + }) + + expect(result.orderBy).toBe(`"price" NULLS LAST`) + expect(result.params).toEqual([]) + }) + + it(`should compile orderBy with desc direction`, () => { + const result = compileSQLite({ + orderBy: [ + { + expression: ref([`price`]), + compareOptions: { direction: `desc`, nulls: `last` }, + }, + ], + 
}) + + expect(result.orderBy).toBe(`"price" DESC NULLS LAST`) + }) + + it(`should compile orderBy with nulls first`, () => { + const result = compileSQLite({ + orderBy: [ + { + expression: ref([`price`]), + compareOptions: { direction: `asc`, nulls: `first` }, + }, + ], + }) + + expect(result.orderBy).toBe(`"price" NULLS FIRST`) + }) + + it(`should compile multiple orderBy clauses`, () => { + const result = compileSQLite({ + orderBy: [ + { + expression: ref([`category`]), + compareOptions: { direction: `asc`, nulls: `last` }, + }, + { + expression: ref([`price`]), + compareOptions: { direction: `desc`, nulls: `last` }, + }, + ], + }) + + expect(result.orderBy).toBe(`"category" NULLS LAST, "price" DESC NULLS LAST`) + }) + }) + + describe(`limit`, () => { + it(`should pass through limit`, () => { + const result = compileSQLite({ + limit: 50, + }) + + expect(result.limit).toBe(50) + }) + }) + + describe(`combined options`, () => { + it(`should compile where, orderBy, and limit together`, () => { + const result = compileSQLite({ + where: func(`gt`, [ref([`price`]), val(100)]), + orderBy: [ + { + expression: ref([`price`]), + compareOptions: { direction: `desc`, nulls: `last` }, + }, + ], + limit: 10, + }) + + expect(result.where).toBe(`"price" > ?`) + expect(result.orderBy).toBe(`"price" DESC NULLS LAST`) + expect(result.limit).toBe(10) + expect(result.params).toEqual([100]) + }) + + it(`should handle empty options`, () => { + const result = compileSQLite({}) + + expect(result.where).toBeUndefined() + expect(result.orderBy).toBeUndefined() + expect(result.limit).toBeUndefined() + expect(result.params).toEqual([]) + }) + }) +}) From 6175ab573524acdba70dd4f37968c1e728f4978a Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Fri, 6 Feb 2026 16:26:36 +0200 Subject: [PATCH 02/15] Added failing tests. 
--- .../tests/on-demand-sync.test.ts | 149 ++++++++++++++++++ 1 file changed, 149 insertions(+) diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index d9bf4a1b2..9aaf48027 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -136,4 +136,153 @@ describe(`On-Demand Sync Mode`, () => { const prices = loadedProducts.map((p) => p.price).sort((a, b) => a! - b!) expect(prices).toEqual([150, 200]) }) + + it(`should reactively update live query when new matching data is inserted into SQLite`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + // Create collection with on-demand sync mode + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // Create a live query that filters for electronics over $100 + const expensiveElectronics = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveElectronics.cleanup()) + + // Preload triggers the live query to request data via loadSubset + await expensiveElectronics.preload() + + // Wait for initial data to load + await vi.waitFor( + () => { + expect(expensiveElectronics.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // Verify initial products + let names = expensiveElectronics.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product B`, `Product D`]) + + // Now insert a new matching product 
directly into SQLite + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Product F', 300, 'electronics') + `) + + // Wait for the diff trigger to propagate the change to the live query + await vi.waitFor( + () => { + // Should now have 3 products: B, D, and F + expect(expensiveElectronics.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Verify all products including the new one + names = expensiveElectronics.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product B`, `Product D`, `Product F`]) + + // Verify the new product's price + const productF = expensiveElectronics.toArray.find((p) => p.name === `Product F`) + expect(productF?.price).toBe(300) + }) + + it(`should not include non-matching data inserted into SQLite`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + // Create collection with on-demand sync mode + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // Create a live query that filters for electronics over $100 + const expensiveElectronics = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveElectronics.cleanup()) + + // Preload triggers the live query to request data via loadSubset + await expensiveElectronics.preload() + + // Wait for initial data to load + await vi.waitFor( + () => { + expect(expensiveElectronics.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // Verify initial products + const initialNames = 
expensiveElectronics.toArray.map((p) => p.name).sort() + expect(initialNames).toEqual([`Product B`, `Product D`]) + + // Insert a non-matching product: electronics but too cheap + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Cheap Electronics', 50, 'electronics') + `) + + // Insert another non-matching product: expensive but wrong category + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Expensive Clothing', 500, 'clothing') + `) + + // Wait a bit to allow any potential (incorrect) updates to propagate + await new Promise((resolve) => setTimeout(resolve, 200)) + + // Verify the live query still has only the original 2 products + expect(expensiveElectronics.size).toBe(2) + + // Verify the names haven't changed + const finalNames = expensiveElectronics.toArray.map((p) => p.name).sort() + expect(finalNames).toEqual([`Product B`, `Product D`]) + + // Verify the base collection only contains items matching active predicates + // Non-matching diff trigger items are filtered out in on-demand mode + expect(collection.size).toBe(2) // Only the 2 matching items from loadSubset + }) }) From 0f459d51c7b5e3943099d3bdeb574381334589f3 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 9 Feb 2026 15:19:20 +0200 Subject: [PATCH 03/15] OnChange-OnDemand case now applies where via json_extract to the compiled sqlite helper. Supporting a few more operators. Handling multiple live predicates and unloads. 
--- .../research/writeup.md | 1 + .../powersync-db-collection/src/powersync.ts | 58 ++++- .../src/sqlite-compiler.ts | 75 ++++++- .../tests/on-demand-sync.test.ts | 205 +++++++++++++++++- .../tests/sqlite-compiler.test.ts | 61 ++++-- 5 files changed, 368 insertions(+), 32 deletions(-) diff --git a/packages/powersync-db-collection/research/writeup.md b/packages/powersync-db-collection/research/writeup.md index 3586f3c38..c5217f670 100644 --- a/packages/powersync-db-collection/research/writeup.md +++ b/packages/powersync-db-collection/research/writeup.md @@ -62,6 +62,7 @@ const productsCollection = createCollection( > Add PoC of mechanism/implementation/understanding/draw the rest of the owl + ## Ramblings ### Using triggers Remember that db triggers don't work on views, and if you need to work against underlying tables json_extract will be needed - relevant when processing the subset loading. diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index c83fbaee0..d8dece38b 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -1,3 +1,4 @@ +import { or } from '@tanstack/db' import { DiffTriggerOperation, sanitizeSQL } from '@powersync/common' import { PendingOperationStore } from './PendingOperationStore' import { PowerSyncTransactor } from './PowerSyncTransactor' @@ -301,6 +302,10 @@ export function powerSyncCollectionOptions< const { begin, write, commit, markReady } = params const abortController = new AbortController() + // Tracks all active WHERE expressions for on-demand sync filtering. + // Each loadSubset call pushes its predicate; unloadSubset removes it by reference. + const activeWhereExpressions: Array = [] + /** * Loads a subset of data from SQLite based on the provided options. * Called by TanStack DB when a query needs data in on-demand mode. 
@@ -308,6 +313,7 @@ export function powerSyncCollectionOptions< const loadSubsetImpl = async ( options: LoadSubsetOptions, ): Promise => { + activeWhereExpressions.push(options.where) try { const compiled = compileSQLite(options) @@ -335,7 +341,7 @@ export function powerSyncCollectionOptions< // Execute query against PowerSync SQLite const rows = await database.getAll(sql, queryParams) - + console.log(`rows`, rows) // Write rows to TanStack collection begin() for (const row of rows) { @@ -358,6 +364,13 @@ export function powerSyncCollectionOptions< } } + const unloadSubsetImpl = (options: LoadSubsetOptions): void => { + const idx = activeWhereExpressions.indexOf(options.where) + if (idx !== -1) { + activeWhereExpressions.splice(idx, 1) + } + } + // The sync function needs to be synchronous async function start() { database.logger.info( @@ -369,9 +382,48 @@ export function powerSyncCollectionOptions< await database .writeTransaction(async (context) => { begin() + + let selectSQL = `SELECT * FROM ${trackedTableName}` + let selectParams: Array = [] + + if (syncMode === 'on-demand') { + if (activeWhereExpressions.length === 0) { + // No active query yet — clear tracked table and skip + await context.execute(`DELETE FROM ${trackedTableName}`) + commit() + return + } + + // Combine all active predicates with OR + const combinedWhere = + activeWhereExpressions.length === 1 + ? 
activeWhereExpressions[0] + : or( + activeWhereExpressions[0]!, + activeWhereExpressions[1]!, + ...activeWhereExpressions.slice(2), + ) + + // Filter operations using json_extract on the value column + const compiled = compileSQLite( + { where: combinedWhere }, + { jsonColumn: 'value' }, + ) + if (compiled.where) { + selectSQL += ` WHERE ${compiled.where}` + selectParams = [...compiled.params] + } + + // ignore order by and limit - irrelevant for deciding what to load + } + + selectSQL += ` ORDER BY timestamp ASC` + const operations = await context.getAll( - `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`, + selectSQL, + selectParams, ) + console.log(`operations`, operations) const pendingOperations: Array = [] for (const op of operations) { @@ -497,7 +549,7 @@ export function powerSyncCollectionOptions< loadSubset: syncMode === `on-demand` ? loadSubsetImpl : undefined, unloadSubset: - syncMode === `on-demand` ? () => {} : undefined, + syncMode === `on-demand` ? unloadSubsetImpl : undefined, } }, // Expose the getSyncMetadata function diff --git a/packages/powersync-db-collection/src/sqlite-compiler.ts b/packages/powersync-db-collection/src/sqlite-compiler.ts index 2e2e9de14..9305bc05e 100644 --- a/packages/powersync-db-collection/src/sqlite-compiler.ts +++ b/packages/powersync-db-collection/src/sqlite-compiler.ts @@ -14,6 +14,18 @@ export interface SQLiteCompiledQuery { params: Array } +/** + * Options for controlling how SQL is compiled. + */ +export interface CompileSQLiteOptions { + /** + * When set, column references emit `json_extract(, '$.')` + * instead of `""`. The `id` column is excluded since it's stored + * as a direct column in the tracked table. + */ + jsonColumn?: string +} + /** * Compiles TanStack DB LoadSubsetOptions to SQLite query components. 
* @@ -30,18 +42,18 @@ export interface SQLiteCompiledQuery { * // Result: { where: '"price" > ?', orderBy: '"price" DESC', limit: 50, params: [100] } * ``` */ -export function compileSQLite(options: LoadSubsetOptions): SQLiteCompiledQuery { +export function compileSQLite(options: LoadSubsetOptions, compileOptions?: CompileSQLiteOptions): SQLiteCompiledQuery { const { where, orderBy, limit } = options const params: Array = [] const result: SQLiteCompiledQuery = { params } if (where) { - result.where = compileExpression(where, params) + result.where = compileExpression(where, params, compileOptions) } if (orderBy) { - result.orderBy = compileOrderBy(orderBy, params) + result.orderBy = compileOrderBy(orderBy, params, compileOptions) } if (limit !== undefined) { @@ -67,20 +79,26 @@ function quoteIdentifier(name: string): string { function compileExpression( exp: IR.BasicExpression, params: Array, + compileOptions?: CompileSQLiteOptions, ): string { switch (exp.type) { case `val`: params.push(exp.value) return `?` - case `ref`: + case `ref`: { if (exp.path.length !== 1) { throw new Error( `SQLite compiler doesn't support nested properties: ${exp.path.join(`.`)}`, ) } - return quoteIdentifier(exp.path[0]!) + const columnName = exp.path[0]! 
+ if (compileOptions?.jsonColumn && columnName !== `id`) { + return `json_extract(${quoteIdentifier(compileOptions.jsonColumn)}, '$.${columnName}')` + } + return quoteIdentifier(columnName) + } case `func`: - return compileFunction(exp, params) + return compileFunction(exp, params, compileOptions) default: throw new Error(`Unknown expression type: ${(exp as any).type}`) } @@ -92,9 +110,10 @@ function compileExpression( function compileOrderBy( orderBy: IR.OrderBy, params: Array, + compileOptions?: CompileSQLiteOptions, ): string { const clauses = orderBy.map((clause: IR.OrderByClause) => - compileOrderByClause(clause, params), + compileOrderByClause(clause, params, compileOptions), ) return clauses.join(`, `) } @@ -105,9 +124,10 @@ function compileOrderBy( function compileOrderByClause( clause: IR.OrderByClause, params: Array, + compileOptions?: CompileSQLiteOptions, ): string { const { expression, compareOptions } = clause - let sql = compileExpression(expression, params) + let sql = compileExpression(expression, params, compileOptions) if (compareOptions.direction === `desc`) { sql = `${sql} DESC` @@ -137,6 +157,7 @@ function isNullValue(exp: IR.BasicExpression): boolean { function compileFunction( exp: IR.Func, params: Array, + compileOptions?: CompileSQLiteOptions, ): string { const { name, args } = exp @@ -153,7 +174,7 @@ function compileFunction( // Compile arguments const compiledArgs = args.map((arg: IR.BasicExpression) => - compileExpression(arg, params), + compileExpression(arg, params, compileOptions), ) // Handle different operator types @@ -189,7 +210,7 @@ function compileFunction( const arg = args[0] if (arg && arg.type === `func`) { if (arg.name === `isNull` || arg.name === `isUndefined`) { - const innerArg = compileExpression(arg.args[0]!, params) + const innerArg = compileExpression(arg.args[0]!, params, compileOptions) return `${innerArg} IS NOT NULL` } } @@ -243,10 +264,40 @@ function compileFunction( return `${compiledArgs[0]} LIKE 
${compiledArgs[1]}` } + case `ilike`: { + if (compiledArgs.length !== 2) { + throw new Error(`ilike expects 2 arguments`) + } + return `${compiledArgs[0]} LIKE ${compiledArgs[1]} COLLATE NOCASE` + } + + // String case functions + case `upper`: { + if (compiledArgs.length !== 1) { + throw new Error(`upper expects 1 argument`) + } + return `UPPER(${compiledArgs[0]})` + } + + case `lower`: { + if (compiledArgs.length !== 1) { + throw new Error(`lower expects 1 argument`) + } + return `LOWER(${compiledArgs[0]})` + } + + // Null fallback + case `coalesce`: { + if (compiledArgs.length < 1) { + throw new Error(`coalesce expects at least 1 argument`) + } + return `COALESCE(${compiledArgs.join(`, `)})` + } + default: throw new Error( `Operator '${name}' is not supported in PowerSync on-demand sync. ` + - `Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like`, + `Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like, ilike, upper, lower, coalesce`, ) } } @@ -255,7 +306,7 @@ function compileFunction( * Check if operator is a comparison operator */ function isComparisonOp(name: string): boolean { - return [`eq`, `gt`, `gte`, `lt`, `lte`, `like`].includes(name) + return [`eq`, `gt`, `gte`, `lt`, `lte`, `like`, `ilike`].includes(name) } /** diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index 9aaf48027..73fadcc5b 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -70,7 +70,7 @@ describe(`On-Demand Sync Mode`, () => { // Wait for collection to be ready await collection.stateWhenReady() - + // Verify NO data was loaded into the collection expect(collection.size).toBe(0) }) @@ -285,4 +285,207 @@ describe(`On-Demand Sync Mode`, () => { // Non-matching diff trigger items are filtered out in on-demand mode expect(collection.size).toBe(2) // Only the 2 matching 
items from loadSubset }) + + it(`should handle multiple live queries without losing predicate coverage`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + // Create collection with on-demand sync mode + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // LQ1: electronics category + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + // Products A(50), B(150), D(200) are electronics + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // LQ2: price > 100 (different predicate on same collection) + const expensiveQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveQuery.cleanup()) + + await expensiveQuery.preload() + + await vi.waitFor( + () => { + // Products B(150) and D(200) have price > 100 + expect(expensiveQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // Now insert a new product that matches LQ1 (electronics) but NOT LQ2 (price <= 100) + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Cheap Gadget', 30, 'electronics') + `) + + // The diff trigger should use the OR of both active predicates: + // 
(category = 'electronics') OR (price > 100) + // 'Cheap Gadget' (electronics, price=30) matches the first predicate, + // so it should reach the base collection and appear in electronicsQuery. + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(4) // 3 original + Cheap Gadget + }, + { timeout: 2000 }, + ) + }) + + it(`should handle three live queries with combined predicate coverage`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // LQ1: electronics category + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + // Products A(50), B(150), D(200) are electronics + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // LQ2: price > 100 + const expensiveQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveQuery.cleanup()) + + await expensiveQuery.preload() + + await vi.waitFor( + () => { + // Products B(150) and D(200) have price > 100 + expect(expensiveQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // LQ3: clothing category — a third predicate to exercise the 3-arg OR path + const 
clothingQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `clothing`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => clothingQuery.cleanup()) + + await clothingQuery.preload() + + await vi.waitFor( + () => { + // Products C(25) and E(75) are clothing + expect(clothingQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // Insert a product that only matches LQ3 (clothing, cheap) + // Diff trigger must OR all three predicates to catch this + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'New Shirt', 40, 'clothing') + `) + + await vi.waitFor( + () => { + expect(clothingQuery.size).toBe(3) // C, E + New Shirt + }, + { timeout: 2000 }, + ) + + // Verify the other queries are unaffected + expect(electronicsQuery.size).toBe(3) + expect(expensiveQuery.size).toBe(2) + }) }) diff --git a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts index 245e3101b..606096b37 100644 --- a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts +++ b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts @@ -1,21 +1,11 @@ import { describe, expect, it } from 'vitest' +import { IR } from '@tanstack/db' import { compileSQLite } from '../src/sqlite-compiler' -import type { IR } from '@tanstack/db' +const val = (value: T) => new IR.Value(value) // Helper to create expression nodes -const val = (value: T): IR.BasicExpression => ({ type: `val`, value }) -const ref = (path: Array): IR.BasicExpression => ({ - type: `ref`, - path, -}) -const func = ( - name: string, - args: Array>, -): IR.BasicExpression => ({ - type: `func`, - name, - args, -}) +const ref = (path: Array) => new IR.PropRef(path) + const func = (name: string, args: Array) => 
new IR.Func(name, args) describe(`SQLite Compiler`, () => { describe(`where clause compilation`, () => { @@ -146,12 +136,51 @@ describe(`SQLite Compiler`, () => { ).toThrow(`Cannot use null/undefined with 'eq' operator`) }) + it(`should compile ilike operator`, () => { + const result = compileSQLite({ + where: func(`ilike`, [ref([`name`]), val(`%test%`)]), + }) + + expect(result.where).toBe(`"name" LIKE ? COLLATE NOCASE`) + expect(result.params).toEqual([`%test%`]) + }) + + it(`should compile upper function`, () => { + const result = compileSQLite({ + where: func(`eq`, [func(`upper`, [ref([`name`])]), val(`TEST`)]), + }) + + expect(result.where).toBe(`UPPER("name") = ?`) + expect(result.params).toEqual([`TEST`]) + }) + + it(`should compile lower function`, () => { + const result = compileSQLite({ + where: func(`eq`, [func(`lower`, [ref([`name`])]), val(`test`)]), + }) + + expect(result.where).toBe(`LOWER("name") = ?`) + expect(result.params).toEqual([`test`]) + }) + + it(`should compile coalesce function`, () => { + const result = compileSQLite({ + where: func(`eq`, [ + func(`coalesce`, [ref([`name`]), val(`default`)]), + val(`test`), + ]), + }) + + expect(result.where).toBe(`COALESCE("name", ?) = ?`) + expect(result.params).toEqual([`default`, `test`]) + }) + it(`should throw error for unsupported operators`, () => { expect(() => compileSQLite({ - where: func(`ilike`, [ref([`name`]), val(`%test%`)]), + where: func(`unsupported_op`, [ref([`name`]), val(`%test%`)]), }), - ).toThrow(`Operator 'ilike' is not supported`) + ).toThrow(`Operator 'unsupported_op' is not supported`) }) }) From eefc0435d315a330ee6d13b22a6ff7dacb9c429a Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 9 Feb 2026 15:27:00 +0200 Subject: [PATCH 04/15] Unload tests. 
--- .../tests/on-demand-sync.test.ts | 118 ++++++++++++++++++ 1 file changed, 118 insertions(+) diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index 73fadcc5b..a946b8388 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -488,4 +488,122 @@ describe(`On-Demand Sync Mode`, () => { expect(electronicsQuery.size).toBe(3) expect(expensiveQuery.size).toBe(2) }) + + it(`should stop loading data for a predicate after its live query is cleaned up`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // LQ1: electronics category + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // LQ2: clothing category + const clothingQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `clothing`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + + await clothingQuery.preload() + + await vi.waitFor( + () => { + expect(clothingQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) 
+ + const collectionSizeBeforeCleanup = collection.size + + // Kill LQ2 — its predicate should be removed via unloadSubset + clothingQuery.cleanup() + + // Insert a new clothing item — should NOT be picked up since LQ2 is gone + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'New Shirt', 40, 'clothing') + `) + + // Wait to allow any (incorrect) propagation + await new Promise((resolve) => setTimeout(resolve, 200)) + + // Base collection should not have grown from the clothing insert + // Only electronics predicate is active, and 'New Shirt' is clothing + expect(collection.size).toBe(collectionSizeBeforeCleanup) + + // Insert a new electronics item — should still be picked up by LQ1 + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'New Gadget', 99, 'electronics') + `) + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(4) // 3 original + New Gadget + }, + { timeout: 2000 }, + ) + + const collectionSizeAfterGadget = collection.size + + // Kill LQ1 — no active predicates remain + electronicsQuery.cleanup() + + // Insert items matching both former predicates — neither should be picked up + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Another Gadget', 120, 'electronics') + `) + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Another Shirt', 15, 'clothing') + `) + + await new Promise((resolve) => setTimeout(resolve, 200)) + + // Base collection should not have grown — no active predicates + expect(collection.size).toBe(collectionSizeAfterGadget) + }) }) From 5aee5c7d14e12672445f82cd13d5eeddbe085ea3 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 9 Feb 2026 15:57:48 +0200 Subject: [PATCH 05/15] Basic loadSubset behavior tests. 
--- .../powersync-db-collection/src/powersync.ts | 4 +- .../tests/on-demand-sync.test.ts | 192 ++++++++++++++++++ 2 files changed, 194 insertions(+), 2 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index d8dece38b..87a4b17f0 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -341,7 +341,7 @@ export function powerSyncCollectionOptions< // Execute query against PowerSync SQLite const rows = await database.getAll(sql, queryParams) - console.log(`rows`, rows) + // Write rows to TanStack collection begin() for (const row of rows) { @@ -423,7 +423,7 @@ export function powerSyncCollectionOptions< selectSQL, selectParams, ) - console.log(`operations`, operations) + const pendingOperations: Array = [] for (const op of operations) { diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index a946b8388..8b6cc18de 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -2,10 +2,14 @@ import { randomUUID } from 'node:crypto' import { tmpdir } from 'node:os' import { PowerSyncDatabase, Schema, Table, column } from '@powersync/node' import { + and, createCollection, eq, gt, + gte, liveQueryCollectionOptions, + lt, + or, } from '@tanstack/db' import { describe, expect, it, onTestFinished, vi } from 'vitest' import { powerSyncCollectionOptions } from '../src' @@ -606,4 +610,192 @@ describe(`On-Demand Sync Mode`, () => { // Base collection should not have grown — no active predicates expect(collection.size).toBe(collectionSizeAfterGadget) }) + + describe(`Basic loadSubset behavior`, () => { + it(`should pass correct WHERE clause from live query filters to loadSubset`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const 
collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Query using lt — only products with price < 50: Product C (25) + const cheapQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => lt(product.price, 50)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => cheapQuery.cleanup()) + + await cheapQuery.preload() + + await vi.waitFor( + () => { + expect(cheapQuery.size).toBe(1) + }, + { timeout: 2000 }, + ) + + const names = cheapQuery.toArray.map((p) => p.name) + expect(names).toEqual([`Product C`]) + }) + + it(`should pass ORDER BY and LIMIT to loadSubset`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Top 2 most expensive products, ordered by price descending + const top2Query = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .orderBy(({ product }) => product.price, `desc`) + .limit(2) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => top2Query.cleanup()) + + await top2Query.preload() + + await vi.waitFor( + () => { + expect(top2Query.size).toBe(2) + }, + { timeout: 2000 }, + ) + + const prices = top2Query.toArray.map((p) => p.price) + // Product D (200) and Product B (150) are the top 2 + expect(prices).toEqual([200, 150]) + }) + + it(`should handle 
complex filters (AND, OR) in loadSubset`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Complex filter: (electronics AND price >= 150) OR (clothing AND price < 50) + // Matches: Product B (electronics, 150), Product D (electronics, 200), Product C (clothing, 25) + const complexQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => + or( + and( + eq(product.category, `electronics`), + gte(product.price, 150), + ), + and( + eq(product.category, `clothing`), + lt(product.price, 50), + ), + ), + ) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => complexQuery.cleanup()) + + await complexQuery.preload() + + await vi.waitFor( + () => { + expect(complexQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + const names = complexQuery.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product B`, `Product C`, `Product D`]) + }) + + it(`should handle empty result from loadSubset`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Query for a category that doesn't exist — no matching rows + const emptyQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `furniture`)) + .select(({ product }) => ({ + id: product.id, + name: 
product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => emptyQuery.cleanup()) + + await emptyQuery.preload() + + // Give it time to process + await new Promise((resolve) => setTimeout(resolve, 200)) + + expect(emptyQuery.size).toBe(0) + expect(collection.size).toBe(0) + }) + }) }) From 5230058565a6b1978a637df15e588457fa1e2d9f Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 9 Feb 2026 16:13:11 +0200 Subject: [PATCH 06/15] Reactive updates via diff trigger tests and handling edge cases where data moved out of the area of interest. --- .../powersync-db-collection/src/powersync.ts | 21 +- .../tests/on-demand-sync.test.ts | 224 ++++++++++++++++++ 2 files changed, 241 insertions(+), 4 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 87a4b17f0..6c947fcb1 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -404,17 +404,30 @@ export function powerSyncCollectionOptions< ...activeWhereExpressions.slice(2), ) - // Filter operations using json_extract on the value column + // Filter operations where the new value matches any active predicate const compiled = compileSQLite( { where: combinedWhere }, { jsonColumn: 'value' }, ) + // Also filter UPDATEs where the previous value matched — this catches + // rows moving OUT of a predicate's scope (e.g. category changed from + // 'electronics' to 'clothing' while filtering for 'electronics') + const compiledPrev = compileSQLite( + { where: combinedWhere }, + { jsonColumn: 'previous_value' }, + ) if (compiled.where) { - selectSQL += ` WHERE ${compiled.where}` + selectSQL += ` WHERE (${compiled.where})` selectParams = [...compiled.params] - } - // ignore order by and limit - irrelevant for deciding what to load + if (compiledPrev.where) { + selectSQL += ` OR (operation = ? 
AND ${compiledPrev.where})` + selectParams.push( + DiffTriggerOperation.UPDATE, + ...compiledPrev.params, + ) + } + } } selectSQL += ` ORDER BY timestamp ASC` diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index 8b6cc18de..9d246026b 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -798,4 +798,228 @@ describe(`On-Demand Sync Mode`, () => { expect(collection.size).toBe(0) }) }) + + describe(`Reactive updates via diff trigger`, () => { + it(`should handle UPDATE to an existing row that still matches the predicate`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + // Products A(50), B(150), D(200) are electronics + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Update Product A's price — still electronics, still matches + const productA = electronicsQuery.toArray.find((p) => p.name === `Product A`) + await db.execute(`UPDATE products SET price = 99 WHERE id = ?`, [productA!.id]) + + await vi.waitFor( + () => { + const updated = electronicsQuery.toArray.find((p) => p.name === `Product A`) + expect(updated?.price).toBe(99) + }, + { timeout: 
2000 }, + ) + + // Size unchanged — same row, just updated + expect(electronicsQuery.size).toBe(3) + }) + + it(`should handle UPDATE that causes a row to no longer match the predicate`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Change Product A from electronics to clothing — no longer matches + const productA = electronicsQuery.toArray.find((p) => p.name === `Product A`) + await db.execute(`UPDATE products SET category = 'clothing' WHERE id = ?`, [productA!.id]) + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) + + const names = electronicsQuery.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product B`, `Product D`]) + }) + + it(`should handle UPDATE that causes a row to start matching the predicate`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) 
=> + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + // Products A(50), B(150), D(200) are electronics + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Change Product C from clothing to electronics — now matches + // Product C has id we need to look up from SQLite directly + const productC = await db.get<{ id: string }>( + `SELECT id FROM products WHERE name = 'Product C'`, + ) + await db.execute(`UPDATE products SET category = 'electronics' WHERE id = ?`, [productC.id]) + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(4) + }, + { timeout: 2000 }, + ) + + const names = electronicsQuery.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product A`, `Product B`, `Product C`, `Product D`]) + }) + + it(`should handle DELETE of a matching row`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Delete Product A + const 
productA = electronicsQuery.toArray.find((p) => p.name === `Product A`) + await db.execute(`DELETE FROM products WHERE id = ?`, [productA!.id]) + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) + + const names = electronicsQuery.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product B`, `Product D`]) + }) + }) }) From 352e4e5a15f32723825b1f4be90f8cd4a391760a Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Mon, 9 Feb 2026 16:17:52 +0200 Subject: [PATCH 07/15] Unload/cleanup and Edge case tests --- .../tests/on-demand-sync.test.ts | 309 ++++++++++++++++++ 1 file changed, 309 insertions(+) diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index 9d246026b..db4691d57 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -1022,4 +1022,313 @@ describe(`On-Demand Sync Mode`, () => { expect(names).toEqual([`Product B`, `Product D`]) }) }) + + describe(`Unload / cleanup`, () => { + it(`should handle rapid create-and-destroy of live queries without errors`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Rapidly create and destroy 5 live queries + for (let i = 0; i < 5; i++) { + const query = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + query.cleanup() + } + + // Give time for any async cleanup 
to settle + await new Promise((resolve) => setTimeout(resolve, 200)) + + // Collection should still be functional — create one more and verify it works + const finalQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => finalQuery.cleanup()) + + await finalQuery.preload() + + await vi.waitFor( + () => { + expect(finalQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + }) + + it(`should handle re-creating a live query with the same predicate after cleanup`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Create first query + const query1 = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + + await query1.preload() + + await vi.waitFor( + () => { + expect(query1.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Destroy it + query1.cleanup() + + await new Promise((resolve) => setTimeout(resolve, 100)) + + // Re-create with same predicate + const query2 = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + 
onTestFinished(() => query2.cleanup()) + + await query2.preload() + + await vi.waitFor( + () => { + expect(query2.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // Verify reactive updates still work on the re-created query + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Product F', 300, 'electronics') + `) + + await vi.waitFor( + () => { + expect(query2.size).toBe(4) + }, + { timeout: 2000 }, + ) + }) + }) + + describe(`Edge cases`, () => { + it(`should handle loadSubset with no WHERE clause (load all data)`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Query with no WHERE — selects all products + const allQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => allQuery.cleanup()) + + await allQuery.preload() + + await vi.waitFor( + () => { + expect(allQuery.size).toBe(5) + }, + { timeout: 2000 }, + ) + }) + + it(`should handle empty result from loadSubset (no matching rows in SQLite)`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + const emptyQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `furniture`)) + .select(({ product }) => ({ + id: product.id, 
+ name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => emptyQuery.cleanup()) + + await emptyQuery.preload() + + await new Promise((resolve) => setTimeout(resolve, 200)) + + expect(emptyQuery.size).toBe(0) + expect(collection.size).toBe(0) + }) + + it(`should handle concurrent loadSubset calls (multiple queries preloading simultaneously)`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Create three queries but don't await preload individually + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + const clothingQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `clothing`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => clothingQuery.cleanup()) + + const expensiveQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveQuery.cleanup()) + + // Preload all concurrently + await Promise.all([ + 
electronicsQuery.preload(), + clothingQuery.preload(), + expensiveQuery.preload(), + ]) + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(3) // A, B, D + expect(clothingQuery.size).toBe(2) // C, E + expect(expensiveQuery.size).toBe(2) // B, D + }, + { timeout: 2000 }, + ) + }) + }) }) From 01b0e73bc32d134fa17fc6f5fbcd4cc86ccf9e06 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 10 Feb 2026 10:17:18 +0200 Subject: [PATCH 08/15] Cleared research directory. --- .../research/ARCHITECTURE.md | 608 ------------------ .../on-demand-sync-implementation-plan.md | 351 ---------- .../research/on-demand-sync-research.md | 329 ---------- .../research/writeup.md | 103 --- 4 files changed, 1391 deletions(-) delete mode 100644 packages/powersync-db-collection/research/ARCHITECTURE.md delete mode 100644 packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md delete mode 100644 packages/powersync-db-collection/research/on-demand-sync-research.md delete mode 100644 packages/powersync-db-collection/research/writeup.md diff --git a/packages/powersync-db-collection/research/ARCHITECTURE.md b/packages/powersync-db-collection/research/ARCHITECTURE.md deleted file mode 100644 index f77b34c0e..000000000 --- a/packages/powersync-db-collection/research/ARCHITECTURE.md +++ /dev/null @@ -1,608 +0,0 @@ -# TanStack DB & PowerSync Integration: Architecture Deep Dive - -## Table of Contents - -1. [TanStack DB Overview](#1-tanstack-db-overview) -2. [Core Concepts](#2-core-concepts) -3. [Collection Architecture](#3-collection-architecture) -4. [State Management & Virtual Derived State](#4-state-management--virtual-derived-state) -5. [Transaction System](#5-transaction-system) -6. [Sync Protocol](#6-sync-protocol) -7. [Query System](#7-query-system) -8. [Indexing](#8-indexing) -9. [PowerSync Integration](#9-powersync-integration) -10. [End-to-End Data Flow](#10-end-to-end-data-flow) - ---- - -## 1. 
TanStack DB Overview - -TanStack DB is a **reactive, client-side data store** that provides: - -- **Normalized, collection-based data management** — data lives in typed collections keyed by a primary key. -- **Live queries** — queries automatically re-evaluate when underlying data changes. -- **Optimistic mutations** — writes apply to the UI instantly; the sync layer persists them asynchronously. -- **Pluggable sync backends** — PowerSync, ElectricSQL, RxDB, local storage, or custom adapters. -- **Framework adapters** — React, Vue, Svelte, Solid, Angular. - -The core library (`@tanstack/db`) is framework- and backend-agnostic. Backend packages like `@tanstack/powersync-db-collection` provide a `collectionOptions` factory that wires up sync, mutations, and schema conversion for a specific backend. - -### Package Map - -``` -packages/ -├── db/ # Core library (@tanstack/db) -├── db-ivm/ # Incremental View Maintenance engine -├── powersync-db-collection/ # PowerSync adapter ← this package -├── electric-db-collection/ # ElectricSQL adapter -├── rxdb-db-collection/ # RxDB adapter -├── query-db-collection/ # Generic query-based adapter -├── offline-transactions/ # Offline transaction queue -├── react-db/ # React bindings -├── vue-db/ # Vue bindings -├── svelte-db/ # Svelte bindings -├── solid-db/ # Solid bindings -└── angular-db/ # Angular bindings -``` - ---- - -## 2. Core Concepts - -### Collections - -A **Collection** is a typed, keyed store of objects — analogous to a database table. Every item has a unique key (usually `id`). Collections are the fundamental unit of data in TanStack DB. - -```typescript -const todos = createCollection({ - id: 'todos', - getKey: (todo) => todo.id, - schema: todoSchema, - sync: { /* backend-specific */ }, -}) -``` - -### Transactions - -All mutations (insert/update/delete) produce a **Transaction**. 
Transactions are optimistic — they apply to the UI immediately — and carry deferred promises (`isPersisted`, `isApplied`) that resolve when the backend confirms the write. - -### Sync - -The **sync layer** is a callback protocol. The backend calls `begin()` / `write()` / `commit()` / `markReady()` to push changes into the collection. The collection merges synced data with optimistic state to produce the visible state. - -### Live Queries - -Queries are defined with a SQL-like builder and compiled to an intermediate representation (IR). They produce **derived collections** that auto-update when source collections change. - ---- - -## 3. Collection Architecture - -A Collection (`packages/db/src/collection/index.ts`) delegates its responsibilities to seven specialized managers: - -``` -CollectionImpl -├── CollectionStateManager — holds synced data, optimistic mutations, virtual derived state -├── CollectionSyncManager — coordinates sync lifecycle and batching -├── CollectionMutationsManager — handles insert/update/delete operations -├── CollectionEventsManager — typed event emission (status changes, subscriber changes) -├── CollectionChangesManager — manages subscriptions and change propagation -├── CollectionLifecycleManager — status transitions (idle → loading → ready), GC -└── CollectionIndexesManager — auto-indexes and manual index management -``` - -### Status Lifecycle - -``` -idle ──→ loading ──→ ready ──→ cleaned-up - │ ↑ - └──→ error ──────────┘ -``` - -- **idle**: Created but no subscribers yet. -- **loading**: Sync started, waiting for `markReady()`. -- **ready**: Initial data loaded, queries can run. -- **error**: Sync or lifecycle error. -- **cleaned-up**: All subscribers gone, GC ran. - -### Lazy Activation - -Collections use a **lazy activation** pattern: sync only starts when the first subscriber appears. When all subscribers leave, a GC timeout starts. If no new subscriber arrives, the collection cleans up. - ---- - -## 4. 
State Management & Virtual Derived State - -**File**: `packages/db/src/collection/state.ts` - -The state manager holds three layers of data: - -``` -┌─────────────────────────────┐ -│ Visible (virtual) state │ ← what queries and UI see -├─────────────────────────────┤ -│ Optimistic upserts/deletes │ ← from local transactions not yet confirmed -├─────────────────────────────┤ -│ Synced data │ ← confirmed state from the backend -└─────────────────────────────┘ -``` - -**Key data structures:** - -| Field | Type | Purpose | -|---|---|---| -| `syncedData` | `SortedMap` | Backend-confirmed rows | -| `syncedMetadata` | `Map` | Per-row sync metadata | -| `optimisticUpserts` | `Map` | Locally inserted/updated rows | -| `optimisticDeletes` | `Set` | Locally deleted keys | -| `transactions` | `SortedMap` | Active transactions | - -**Virtual derivation**: When you call `state.get(key)`, it checks optimistic deletes first, then optimistic upserts, then synced data. There is no materialized "merged" copy — it's computed on read, avoiding double-bookkeeping. - -### SortedMap - -`SortedMap` (`packages/db/src/SortedMap.ts`) wraps a `Map` with a sorted key array for deterministic iteration. Insertions use binary search for O(log n) positioning. - ---- - -## 5. 
Transaction System - -**File**: `packages/db/src/transactions.ts` - -### Mutation Types - -```typescript -type PendingMutation = { - type: 'insert' | 'update' | 'delete' - key: string | number - value?: T // for insert - changes?: Partial // for update - previousValue?: T // for rollback -} -``` - -### Mutation Merging - -When multiple mutations target the same key within a transaction, they merge according to this truth table: - -| Existing | New | Result | -|---|---|---| -| insert | update | insert (merged) | -| insert | delete | both cancelled | -| update | update | update (last wins, changes unioned) | -| update | delete | delete | -| delete | insert | update | -| delete | delete | delete | - -### Transaction Lifecycle - -``` -pending ──→ persisting ──→ completed - │ - └──→ failed (rollback optimistic state) -``` - -Each transaction carries two deferred promises: -- **`isPersisted`** — resolves when the backend confirms the write. -- **`isApplied`** — resolves when the synced data reflects the change. - ---- - -## 6. Sync Protocol - -**File**: `packages/db/src/collection/sync.ts` - -The sync config is a callback that receives control functions: - -```typescript -sync: { - sync: ({ collection, begin, write, commit, markReady, truncate }) => { - // 1. Initial load - begin() - for (const row of initialData) { - write({ type: 'insert', key: row.id, value: row }) - } - commit() - markReady() // signals collection is ready - - // 2. 
Live changes - onChange((change) => { - begin() - write(change) - commit() - }) - - return () => { /* cleanup */ } - } -} -``` - -| Function | Purpose | -|---|---| -| `begin()` | Start a sync transaction | -| `write(msg)` | Add a change message (insert/update/delete) | -| `commit()` | Finalize the transaction, apply to synced state | -| `markReady()` | Signal initial load complete — collection transitions to "ready" | -| `truncate()` | Clear all synced data (used on 409 / must-refetch) | - -### Change Messages - -```typescript -type ChangeMessage = { - type: 'insert' | 'update' | 'delete' - key: TKey - value: T // for insert/update - previousValue?: T // for update - metadata?: unknown -} -``` - ---- - -## 7. Query System - -### Query IR - -**File**: `packages/db/src/query/ir.ts` - -Queries are represented as an intermediate representation (IR): - -```typescript -interface QueryIR { - from: CollectionRef | QueryRef - select?: SelectObject | AggregateSelect - join?: JoinClause[] - where?: WhereExpression[] - groupBy?: GroupByExpression - having?: HavingExpression[] - orderBy?: OrderByExpression - limit?: number - offset?: number - distinct?: true - singleResult?: true -} -``` - -### Builder API - -```typescript -const result = query - .from({ todos }) - .select({ id: todos.id, title: todos.title }) - .where(todos.completed.eq(true)) - .orderBy(todos.createdAt.desc()) - .limit(10) - .key(({ id }) => id) -``` - -### Live Query Collections - -Queries produce **derived collections** that update incrementally when source data changes. This is powered by the `@tanstack/db-ivm` (Incremental View Maintenance) package. - ---- - -## 8. Indexing - -**File**: `packages/db/src/indexes/` - -Collections support automatic and manual indexes: - -- **BTree Index**: For range queries and sorted iteration (`>`, `<`, `>=`, `<=`, `between`). -- **Hash Index**: For exact-match lookups (`=`, `in`). -- **Lazy Index**: Deferred building — the index isn't materialized until first queried. 
- -Auto-indexing can be `'eager'` (build immediately) or `'lazy'` (build on first query). - ---- - -## 9. PowerSync Integration - -**Package**: `@tanstack/powersync-db-collection` - -This package bridges TanStack DB with [PowerSync](https://www.powersync.com/), a sync layer that uses SQLite as the local database and provides offline-first sync with a PostgreSQL backend. - -### 9.1 Entry Point: `powerSyncCollectionOptions()` - -**File**: `packages/powersync-db-collection/src/powersync.ts` - -This factory function takes a PowerSync table + database and returns a complete TanStack DB collection config with sync, mutations, and schema handling wired up. - -```typescript -import { powerSyncCollectionOptions } from '@tanstack/powersync-db-collection' -import { createCollection } from '@tanstack/db' - -const todos = createCollection( - powerSyncCollectionOptions({ - database: powerSyncDb, - table: AppSchema.props.todos, - schema: zodTodoSchema, // optional — enables type transforms - }) -) -``` - -**What it generates:** - -| Config field | Generated value | -|---|---| -| `id` | Table name from PowerSync schema | -| `getKey` | `(row) => row.id` (PowerSync rows always have `id: string`) | -| `schema` | Converts PowerSync table columns to a StandardSchema validator, or uses the user-provided schema | -| `onInsert` | Delegates to `PowerSyncTransactor` | -| `onUpdate` | Delegates to `PowerSyncTransactor` | -| `onDelete` | Delegates to `PowerSyncTransactor` | -| `sync.sync` | Sets up diff trigger observation and initial data loading | -| `utils.getMeta()` | Returns table name, tracked table name, serialization info | - -### 9.2 Schema Conversion - -**File**: `packages/powersync-db-collection/src/schema.ts` - -PowerSync tables define columns with SQLite types (`TEXT`, `INTEGER`, `REAL`). 
The package converts these to a StandardSchema validator: - -``` -PowerSync Column Type → Validation Rule -TEXT → typeof value === 'string' -INTEGER → typeof value === 'number' && Number.isInteger(value) -REAL → typeof value === 'number' -``` - -Every table also gets an `id: string` field validated. - -When a user provides their own schema (e.g., Zod), the package uses that instead and handles serialization/deserialization between the rich TypeScript types and SQLite storage types. - -### 9.3 Serialization - -**File**: `packages/powersync-db-collection/src/serialization.ts` - -SQLite only stores TEXT, INTEGER, and REAL. When the user's schema has richer types (Date, boolean, nested objects), serialization converts them: - -| User Type | SQLite Type | Serialization | -|---|---|---| -| `string` | TEXT | as-is | -| `Date` | TEXT | `.toISOString()` | -| `object/array` | TEXT | `JSON.stringify()` | -| `number` | INTEGER/REAL | as-is | -| `boolean` | INTEGER | `true → 1`, `false → 0` | - -Custom serializers can be provided per-field: - -```typescript -powerSyncCollectionOptions({ - database: db, - table: schema.props.events, - schema: eventSchema, - serializer: { - startDate: (date: Date) => date.getTime(), // custom - }, -}) -``` - -### 9.4 Sync Implementation - -The sync function generated by `powerSyncCollectionOptions` does the following: - -#### Initial Load - -1. Query SQLite for all rows in the table, batched (`syncBatchSize`, default 1000). -2. For each batch, call `begin()` → `write()` for each row → `commit()`. -3. After all rows loaded, call `markReady()`. - -#### Live Change Observation - -PowerSync provides a **diff trigger** system. When data changes in SQLite (from sync or local writes), diff triggers fire with the operation type and affected row. - -The sync function: -1. Registers a diff trigger listener on the tracked table. -2. On each diff trigger event, reads the current row from SQLite. -3. 
Calls `begin()` → `write({ type, key, value })` → `commit()`. - -The tracked table name follows the pattern: if the table is `todos`, the tracked table is `ps_tracked__todos` (managed by PowerSync internally). - -#### Handling "Must Refetch" (409) - -If PowerSync signals a full re-sync is needed, the sync function calls `truncate()` to clear all synced data, then re-runs the initial load. - -### 9.5 PowerSyncTransactor - -**File**: `packages/powersync-db-collection/src/PowerSyncTransactor.ts` - -The transactor handles **outbound mutations** (user writes that need to persist to SQLite). - -#### Flow - -``` -User calls collection.insert({ title: 'Buy milk' }) - │ - ▼ -Transaction created (optimistic state applied immediately) - │ - ▼ -PowerSyncTransactor.applyTransaction(transaction) - │ - ├── Wait for all affected collections to be "ready" - │ - ├── Group mutations by collection - │ - ├── Execute in a PowerSync write transaction: - │ ├── INSERT INTO todos (id, title) VALUES (?, ?) - │ ├── UPDATE todos SET title = ? WHERE id = ? - │ └── DELETE FROM todos WHERE id = ? - │ - ├── Create PendingOperations for each mutation - │ - └── Return promise that resolves when diff triggers observe the changes -``` - -#### Key Detail: Waiting for Diff Triggers - -After writing to SQLite, the transactor doesn't resolve immediately. It creates `PendingOperation` entries and waits for the diff trigger observer (in the sync function) to see those same changes. This ensures the synced state in the collection is up-to-date before the transaction's `isPersisted` promise resolves. 
- -### 9.6 PendingOperationStore - -**File**: `packages/powersync-db-collection/src/PendingOperationStore.ts` - -A global singleton that bridges outbound mutations with the sync observer: - -``` -Transactor Sync Observer - │ │ - ├── store.waitFor(operation) ──────► │ - │ (creates deferred) │ - │ ├── store.resolvePendingFor(operations) - │ │ (resolves deferred) - ◄─────────────── resolved ────────────┘ -``` - -A `PendingOperation` contains: -- `tableName` — which table was modified -- `operation` — insert/update/delete -- `id` — the row ID -- `timestamp` — when the operation was created - -### 9.7 Type System - -PowerSync tables have a specific column structure. The package provides type helpers to bridge PowerSync's SQLite types with TypeScript: - -```typescript -// PowerSync table → TypeScript record type -type ExtractedTable = { - [K in keyof TTable['columnMap']]: SQLiteColumnType -} & { id: string } - -// When a user schema is provided, output type comes from the schema -type InferPowerSyncOutputType = - TSchema extends StandardSchemaV1 - ? StandardSchemaV1.InferOutput - : ExtractedTable -``` - -Three overloads of `powerSyncCollectionOptions` handle the three cases: -1. **No schema** — types are raw SQLite types (string | number | null). -2. **Schema with SQLite-compatible input** — schema validates but input types match SQLite. -3. **Schema with rich types** — schema has non-SQLite types (Date, boolean, etc.), serialization required. - ---- - -## 10. 
End-to-End Data Flow - -### Reading Data (Sync → UI) - -``` -PostgreSQL backend - │ - ▼ -PowerSync sync service - │ - ▼ -Local SQLite database - │ - ├── Diff triggers fire - │ - ▼ -Sync observer in powerSyncCollectionOptions - │ - ├── begin() → write(changeMessage) → commit() - │ - ▼ -CollectionStateManager.syncedData updated - │ - ▼ -CollectionChangesManager notifies subscribers - │ - ▼ -Live queries re-evaluate (via IVM) - │ - ▼ -React/Vue/Svelte component re-renders -``` - -### Writing Data (UI → Backend) - -``` -User action (e.g., click "Add Todo") - │ - ▼ -collection.insert({ id: uuid(), title: 'Buy milk' }) - │ - ├── Optimistic state applied immediately - │ └── UI re-renders with new todo - │ - ├── Transaction created (state: 'pending') - │ - ▼ -PowerSyncTransactor.applyTransaction() - │ - ├── Executes SQL: INSERT INTO todos ... - │ - ├── Creates PendingOperation - │ - ├── Transaction state → 'persisting' - │ - ▼ -Diff trigger fires (SQLite observed the insert) - │ - ├── Sync observer writes to collection - │ └── syncedData now contains the row - │ - ├── PendingOperationStore resolves - │ - ├── Optimistic state cleared for this key - │ - ├── Transaction state → 'completed' - │ - ▼ -transaction.isPersisted.promise resolves - │ - ▼ -PowerSync upload queue sends to backend - │ - ▼ -PostgreSQL backend persists the row -``` - -### Conflict Resolution - -When a sync update arrives for a key that has an optimistic mutation: -1. The synced data is updated. -2. The optimistic mutation remains active until its transaction completes. -3. The UI sees the optimistic version (optimistic takes priority). -4. When the transaction completes, the optimistic layer clears and the synced version becomes visible. - -If the backend rejects a write, the transaction moves to `'failed'`, the optimistic state rolls back, and the UI reverts to the synced version. 
- ---- - -## Key Files Reference - -### Core TanStack DB - -| File | Purpose | -|---|---| -| `packages/db/src/collection/index.ts` | Collection class, manager orchestration | -| `packages/db/src/collection/state.ts` | State manager, virtual derived state | -| `packages/db/src/collection/sync.ts` | Sync coordination | -| `packages/db/src/collection/mutations.ts` | Insert/update/delete handling | -| `packages/db/src/collection/events.ts` | Event types and emission | -| `packages/db/src/collection/changes.ts` | Subscription management | -| `packages/db/src/collection/lifecycle.ts` | Status transitions, GC | -| `packages/db/src/collection/subscription.ts` | Individual subscription tracking | -| `packages/db/src/transactions.ts` | Transaction class, mutation merging | -| `packages/db/src/query/ir.ts` | Query intermediate representation | -| `packages/db/src/query/builder/index.ts` | Fluent query builder | -| `packages/db/src/query/live-query-collection.ts` | Derived live collections | -| `packages/db/src/indexes/base-index.ts` | Index interface | -| `packages/db/src/SortedMap.ts` | Deterministic-order map | -| `packages/db/src/proxy.ts` | Change-tracking proxy for updates | -| `packages/db/src/types.ts` | All core type definitions | - -### PowerSync Integration - -| File | Purpose | -|---|---| -| `packages/powersync-db-collection/src/powersync.ts` | `powerSyncCollectionOptions()` factory | -| `packages/powersync-db-collection/src/PowerSyncTransactor.ts` | Outbound mutation handler | -| `packages/powersync-db-collection/src/PendingOperationStore.ts` | Bridges mutations ↔ sync observation | -| `packages/powersync-db-collection/src/schema.ts` | PowerSync table → StandardSchema conversion | -| `packages/powersync-db-collection/src/serialization.ts` | Rich types ↔ SQLite type conversion | -| `packages/powersync-db-collection/src/definitions.ts` | Type definitions, serializer config | -| `packages/powersync-db-collection/src/helpers.ts` | Type extraction, operation mapping | 
diff --git a/packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md b/packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md deleted file mode 100644 index e1338f0bc..000000000 --- a/packages/powersync-db-collection/research/on-demand-sync-implementation-plan.md +++ /dev/null @@ -1,351 +0,0 @@ -# Implementation Plan: `syncMode: 'on-demand'` for powersync-db-collection - -## Overview - -Add query-driven sync support to the PowerSync TanStack DB collection adapter. This enables loading data from the local PowerSync SQLite database into TanStack DB collections on-demand based on query predicates, rather than eagerly loading all data upfront. - -## Background - -### Current State -- PowerSync collection uses **eager loading only**: all data loaded in `beforeCreate` hook -- Syncs from local PowerSync SQLite → TanStack DB in-memory collection -- Uses diff triggers for continuous sync of changes -- No `loadSubset` or `unloadSubset` implementation - -### Target State -- Support `syncMode: 'eager'` (current behavior) and `syncMode: 'on-demand'` -- On-demand: collection marks ready immediately, loads data via `loadSubset` when queries need it -- Predicate push-down: convert TanStack expression trees to SQLite queries - -## Key Design Decisions - -### 1. SQLite vs PostgreSQL Compiler -Electric uses PostgreSQL. PowerSync uses SQLite. Key differences: -- Parameter placeholders: `?` instead of `$1, $2` -- IN operator: `IN (?, ?, ?)` instead of `= ANY($1)` -- No native ILIKE: use `LIKE` with `COLLATE NOCASE` -- Boolean storage: SQLite uses 0/1 integers (simpler than PG) - -### 2. Diff Trigger Behavior -Keep diff triggers firing for ALL SQLite changes regardless of sync mode. TanStack's subscription system filters changes based on query predicates. This ensures changes to loaded data propagate correctly. - -### 3. 
Deduplication Strategy -Use TanStack's built-in `DeduplicatedLoadSubset` class which: -- Tracks loaded predicates to avoid redundant queries -- Handles in-flight request deduplication -- Supports reset for must-refetch scenarios - -### 4. Unload Implementation -Start with no-op implementation. Data remains in SQLite; TanStack handles collection memory via subscription lifecycle/gcTime. - ---- - -## Implementation Steps - -### Step 1: Create SQLite Expression Compiler - -**New file**: `packages/powersync-db-collection/src/sqlite-compiler.ts` - -```typescript -import type { IR, LoadSubsetOptions } from '@tanstack/db' - -export interface SQLiteCompiledQuery { - where?: string - orderBy?: string - limit?: number - params: Array -} - -export function compileSQLite(options: LoadSubsetOptions): SQLiteCompiledQuery -``` - -**Operators to support**: -| TanStack Op | SQLite SQL | -|-------------|------------| -| eq | = | -| gt | > | -| gte | >= | -| lt | < | -| lte | <= | -| and | AND | -| or | OR | -| not | NOT | -| isNull/isUndefined | IS NULL | -| in | IN (?, ?, ...) | -| like | LIKE | -| ilike | LIKE (COLLATE NOCASE) | -| upper/lower | UPPER/LOWER | -| coalesce | COALESCE | - -### Step 2: Modify Sync Configuration - -**File**: `packages/powersync-db-collection/src/powersync.ts` - -#### Architecture Overview - -``` -┌─────────────────────────────────────────────────────────────────────┐ -│ PowerSync Adapter (powersync.ts) │ -│ │ -│ const sync: SyncConfig = { │ -│ sync: (params) => { │ -│ // 1. Define loadSubset inside sync function │ -│ const loadSubsetImpl = async (options) => { ... } │ -│ │ -│ // 2. Setup diff triggers, etc. │ -│ async function start() { ... } │ -│ start() │ -│ │ -│ // 3. Return SyncConfigRes with loadSubset │ -│ return { │ -│ cleanup: () => abortController.abort(), │ -│ loadSubset: syncMode === 'on-demand' ? loadSubsetImpl : undefined, -│ unloadSubset: syncMode === 'on-demand' ? 
() => {} : undefined, -│ } │ -│ } │ -│ } │ -└─────────────────────────────────────────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────────────────────┐ -│ TanStack DB (CollectionSyncManager - sync.ts) │ -│ │ -│ startSync() { │ -│ const syncRes = this.config.sync.sync({ begin, write, ... }) │ -│ this.syncLoadSubsetFn = syncRes?.loadSubset // Stores it │ -│ │ -│ // Validates on-demand has loadSubset │ -│ if (syncMode === 'on-demand' && !this.syncLoadSubsetFn) { │ -│ throw new CollectionConfigurationError(...) │ -│ } │ -│ } │ -│ │ -│ loadSubset(options) { │ -│ if (syncMode === 'eager') return true // No-op for eager │ -│ return this.syncLoadSubsetFn(options) // Calls your function │ -│ } │ -└─────────────────────────────────────────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────────────────────┐ -│ TanStack DB (CollectionSubscription - subscription.ts) │ -│ │ -│ requestSnapshot() { │ -│ // When query needs data, calls loadSubset │ -│ const syncResult = this.collection._sync.loadSubset({ │ -│ where: ..., │ -│ orderBy: ..., │ -│ limit: ..., │ -│ subscription: this, │ -│ }) │ -│ } │ -└─────────────────────────────────────────────────────────────────────┘ -``` - -#### Changes Required - -1. Extract `syncMode` from config (with default `'eager'`) -2. Modify `beforeCreate` hook: - - Eager: existing behavior (load all data, then markReady) - - On-demand: skip data load, markReady immediately -3. Define `loadSubsetImpl` function inside sync -4. 
Return `SyncConfigRes` object instead of just cleanup function - -### Step 3: Implement loadSubset Function - -**Full sync function structure** (showing where loadSubset fits): - -```typescript -const sync: SyncConfig = { - sync: (params) => { - const { begin, write, commit, markReady } = params - const abortController = new AbortController() - - // ───────────────────────────────────────────────────────────── - // Define loadSubset INSIDE the sync function (has access to - // begin/write/commit and database from closure) - // ───────────────────────────────────────────────────────────── - const loadSubsetImpl = async (options: LoadSubsetOptions): Promise<void> => { - const { where, orderBy, limit, params } = compileSQLite(options) - - // Build SELECT query - let sql = `SELECT * FROM ${viewName}` - if (where) sql += ` WHERE ${where}` - if (orderBy) sql += ` ORDER BY ${orderBy}` - if (limit) sql += ` LIMIT ?` - - const queryParams = limit ? [...params, limit] : params - - // Execute against PowerSync SQLite - const rows = await database.getAll(sql, queryParams) - - // Write to TanStack collection - begin() - for (const row of rows) { - write({ type: 'insert', value: deserializeSyncRow(row) }) - } - commit() - } - - // ───────────────────────────────────────────────────────────── - // Existing async setup (diff triggers, initial load for eager) - // ───────────────────────────────────────────────────────────── - async function start() { - // ... existing onChangeWithCallback setup ... - - const disposeTracking = await database.triggers.createDiffTrigger({ - // ... existing trigger config ...
- hooks: { - beforeCreate: async (context) => { - if (syncMode === 'on-demand') { - // On-demand: skip initial load, mark ready immediately - markReady() - return - } - - // Eager: existing batch loading logic - let currentBatchCount = syncBatchSize - let cursor = 0 - while (currentBatchCount == syncBatchSize) { - begin() - const batchItems = await context.getAll( - `SELECT * FROM ${viewName} LIMIT ? OFFSET ?`, - [syncBatchSize, cursor] - ) - // ... write each row ... - commit() - } - markReady() - }, - }, - }) - } - - start().catch((error) => /* ... */) - - // ───────────────────────────────────────────────────────────── - // Return SyncConfigRes (replaces the old `return () => { ... }`) - // ───────────────────────────────────────────────────────────── - return { - cleanup: () => { - database.logger.info(`Sync stopped for ${viewName}`) - abortController.abort() - }, - loadSubset: syncMode === 'on-demand' ? loadSubsetImpl : undefined, - unloadSubset: syncMode === 'on-demand' ? () => {} : undefined, - } - }, - getSyncMetadata: undefined, -} -``` - -### Step 4: Add Deduplication Wrapper - -```typescript -import { DeduplicatedLoadSubset } from '@tanstack/db' - -// Wrap loadSubset with deduplication -const dedupe = new DeduplicatedLoadSubset({ - loadSubset: loadSubsetImpl, - getKey: (row) => row.id -}) - -return { - cleanup: () => abortController.abort(), - loadSubset: syncMode === 'on-demand' ? dedupe.loadSubset : undefined, - unloadSubset: syncMode === 'on-demand' ? () => {} : undefined, // no-op for now -} -``` - -### Step 5: Export New Utilities - -**File**: `packages/powersync-db-collection/src/index.ts` - -Export the SQLite compiler for advanced users who want to customize query generation. 
- ---- - -## Files to Modify/Create - -| File | Action | Description | -|------|--------|-------------| -| `src/sqlite-compiler.ts` | Create | SQLite expression-to-SQL compiler | -| `src/powersync.ts` | Modify | Add syncMode handling, loadSubset impl | -| `src/definitions.ts` | Modify | Add any new config types if needed | -| `src/index.ts` | Modify | Export new utilities | -| `tests/sqlite-compiler.test.ts` | Create | Unit tests for SQL compiler | -| `tests/on-demand-sync.test.ts` | Create | Integration tests for on-demand mode | - ---- - -## Test Plan - -### Unit Tests: SQLite Compiler -- Each operator compiles correctly -- Nested expressions (AND/OR combinations) -- Parameter placeholder ordering -- Identifier quoting for column names -- Null value handling (error for comparison ops) -- OrderBy with direction and nulls handling -- Limit compilation - -### Integration Tests: On-Demand Sync -- Collection marks ready immediately without data -- loadSubset executes correct SQL -- Data appears in collection after loadSubset -- Duplicate rows handled (insert → update) -- Diff triggers still propagate changes -- Overlapping loadSubset calls are deduplicated -- Live queries trigger loadSubset correctly - -### E2E Test Scenario -```typescript -// 1. Create on-demand collection -const collection = createCollection(powerSyncCollectionOptions({ - database: db, - table: schema.products, - syncMode: 'on-demand', -})) - -// 2. Verify empty and ready -await collection.stateWhenReady() -expect(collection.size).toBe(0) - -// 3. Query triggers loadSubset -const query = useLiveQuery({ - query: (q) => q.from({ p: collection }) - .where(({ p }) => gt(p.price, 100)) - .select(({ p }) => p), -}) - -// 4. Verify data loaded -expect(query.data.length).toBeGreaterThan(0) -``` - ---- - -## Verification Steps - -1. Run existing tests to ensure eager mode still works -2. Run new SQLite compiler unit tests -3. Run on-demand integration tests -4. 
Manual testing with a React app using `useLiveQuery` - ---- - -## Design Decisions (Resolved) - -1. **Cursor-based pagination**: Start with basic `where`/`orderBy`/`limit`. Design the compiler interface to allow cursor support later without breaking changes. - -2. **Column name mapping**: Not needed. PowerSync schemas use snake_case directly in both JS and SQLite (e.g., `list_id`, `created_at`). No transformation required. - -3. **Error handling**: Log errors and continue. Keep collection in ready state so queries return empty/partial results rather than blocking the app. - ---- - -## References - -- TanStack DB types: `packages/db/src/types.ts` (LoadSubsetOptions, SyncConfigRes) -- Electric reference: `packages/electric-db-collection/src/sql-compiler.ts` -- Expression helpers: `packages/db/src/query/expression-helpers.ts` -- TanStack DB 0.5 blog: https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync diff --git a/packages/powersync-db-collection/research/on-demand-sync-research.md b/packages/powersync-db-collection/research/on-demand-sync-research.md deleted file mode 100644 index c168be51b..000000000 --- a/packages/powersync-db-collection/research/on-demand-sync-research.md +++ /dev/null @@ -1,329 +0,0 @@ -# Research: On-Demand Sync for PowerSync + TanStack DB - -## Executive Summary - -This document captures research findings for implementing `syncMode: 'on-demand'` in the powersync-db-collection package. The feature enables query-driven data loading from the local PowerSync SQLite database into TanStack DB collections. - ---- - -## TanStack DB Query-Driven Sync (v0.5+) - -### Overview -TanStack DB v0.5 introduced "Query-Driven Sync" - a paradigm where components declare data needs through queries, which automatically translate into optimized data fetching. - -**Key concept**: "Your component's query becomes the API call." 
- -### Sync Modes - -| Mode | Behavior | Best For | -|------|----------|----------| -| `eager` | Load all data upfront, then mark ready | Small datasets (<10k rows), reference data | -| `on-demand` | Load data incrementally when queried | Large datasets, search interfaces, catalogs | - -### Core Interfaces - -```typescript -// packages/db/src/types.ts - -type SyncMode = `eager` | `on-demand` - -type LoadSubsetOptions = { - where?: BasicExpression<boolean> // Filter predicate as expression tree - orderBy?: OrderBy // Sort specification - limit?: number // Row limit - cursor?: CursorExpressions // For cursor-based pagination - offset?: number // For offset-based pagination - subscription?: Subscription // The subscription requesting data -} - -type LoadSubsetFn = (options: LoadSubsetOptions) => true | Promise<void> -type UnloadSubsetFn = (options: LoadSubsetOptions) => void - -type SyncConfigRes = { - cleanup?: CleanupFn - loadSubset?: LoadSubsetFn // Called when query needs data - unloadSubset?: UnloadSubsetFn // Called when data no longer needed -} -``` - -### Expression Tree Structure - -TanStack DB represents query predicates as expression trees: - -```typescript -type BasicExpression<T = unknown> = - | { type: 'val', value: T } // Literal value - | { type: 'ref', path: string[] } // Column reference - | { type: 'func', name: string, args: BasicExpression[] } // Operator/function - -// Example: where price > 100 -{ - type: 'func', - name: 'gt', - args: [ - { type: 'ref', path: ['price'] }, - { type: 'val', value: 100 } - ] -} -``` - -### Expression Helpers - -TanStack DB provides utilities for parsing expression trees: - -```typescript -import { - parseWhereExpression, // Convert where to custom format - parseOrderByExpression, // Extract sort specs - extractSimpleComparisons, // Flatten AND-ed comparisons - parseLoadSubsetOptions, // Convenience wrapper - walkExpression, // Generic tree traversal - extractFieldPath, // Get column path from ref - extractValue // Get value from val -} from
'@tanstack/db' -``` - ---- - -## Electric-db-collection Reference Implementation - -### Architecture - -Electric's implementation provides a complete on-demand sync solution for PostgreSQL. Key components: - -1. **sql-compiler.ts** - Converts expression trees to PostgreSQL SQL -2. **Deduplication** - Prevents redundant subset loads -3. **Progressive mode** - Hybrid eager+on-demand for fast initial load - -### SQL Compiler Patterns - -```typescript -// packages/electric-db-collection/src/sql-compiler.ts - -function compileSQL(options: LoadSubsetOptions): SubsetParams { - // 1. Compile where expression to SQL string + params - // 2. Compile orderBy to SQL string - // 3. Return { where, orderBy, limit, params } -} - -// Expression compilation is recursive: -// - val → parameter placeholder ($1, $2, ...) -// - ref → quoted identifier ("columnName") -// - func → operator/function call -``` - -### Operator Mappings - -| TanStack | PostgreSQL | Notes | -|----------|------------|-------| -| eq | = | | -| gt/gte/lt/lte | >/>=/</<= | | -| and/or | AND/OR | Variadic (>2 args) | -| not | NOT | | -| isNull/isUndefined | IS NULL | | -| in | = ANY($1) | Array parameter | -| like/ilike | LIKE/ILIKE | | -| upper/lower | UPPER/LOWER | | - -### Deduplication Strategy - -Electric tracks loaded predicates to avoid redundant queries: - -```typescript -// Tracks what's been loaded -const syncedKeys = new Set<string>() - -// On loadSubset: -// 1. Check if predicate already covered -// 2. Execute query if not covered -// 3. Track newly loaded keys -// 4. Convert inserts to updates for overlapping rows -``` - ---- - -## Current PowerSync Implementation - -### File: `packages/powersync-db-collection/src/powersync.ts` - -**Current behavior (eager only):** - -1. `beforeCreate` hook loads ALL data: - ```typescript - const batchItems = await context.getAll( - `SELECT * FROM ${viewName} LIMIT ? OFFSET ?`, - [syncBatchSize, cursor] - ) - ``` - -2. 
Diff triggers monitor SQLite changes: - ```typescript - database.triggers.createDiffTrigger({ - source: viewName, - destination: trackedTableName, - // Fires for INSERT, UPDATE, DELETE - }) - ``` - -3. Changes propagate to TanStack collection: - ```typescript - write({ - type: mapOperation(operation), // 'insert' | 'update' | 'delete' - value: deserializeSyncRow(row), - }) - ``` - -### Missing for On-Demand - -- No `syncMode` handling (always eager) -- No `loadSubset` implementation -- No SQLite expression compiler -- Returns only `cleanup`, not full `SyncConfigRes` - ---- - -## SQLite vs PostgreSQL Differences - -### Parameter Placeholders -- PostgreSQL: `$1, $2, $3` -- SQLite: `?, ?, ?` - -### Array Membership (IN operator) -- PostgreSQL: `column = ANY($1)` with array parameter -- SQLite: `column IN (?, ?, ?)` with individual parameters - -### Case-Insensitive LIKE -- PostgreSQL: `ILIKE` operator -- SQLite: `LIKE` with `COLLATE NOCASE` - -### Boolean Handling -- PostgreSQL: Native boolean type, needs special handling for < > comparisons -- SQLite: 0/1 integers, simpler comparison handling - -### Identifier Quoting -- PostgreSQL: `"identifier"` -- SQLite: `"identifier"` or `` `identifier` `` - ---- - -## Data Flow: On-Demand Sync - -``` -┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐ -│ React Query │────▶│ TanStack DB │────▶│ PowerSync │ -│ useLiveQuery │ │ Collection │ │ SQLite DB │ -└─────────────────┘ └──────────────────┘ └─────────────────┘ - │ │ │ - │ 1. Query declared │ │ - │ ─────────────────────▶│ │ - │ │ 2. loadSubset called │ - │ │ ──────────────────────▶│ - │ │ │ 3. SQL query - │ │ │ ─────────── - │ │ 4. Rows returned │ - │ │ ◀──────────────────────│ - │ 5. Data in collection │ │ - │ ◀─────────────────────│ │ - │ │ │ - │ 6. Live updates │ 7. Diff trigger fires │ - │ ◀─────────────────────│ ◀──────────────────────│ - │ │ │ -``` - ---- - -## TanStack DB Internal Architecture - -### Where loadSubset Lives - -**1. 
PowerSync provides it** (in sync function return): -```typescript -// powersync.ts -const sync: SyncConfig = { - sync: (params) => { - const loadSubsetImpl = async (options: LoadSubsetOptions) => { - // Query SQLite, write to collection - } - - return { - cleanup: () => { ... }, - loadSubset: syncMode === 'on-demand' ? loadSubsetImpl : undefined, - } - } -} -``` - -**2. TanStack stores it** (CollectionSyncManager.startSync): -```typescript -// packages/db/src/collection/sync.ts line 226 -this.syncLoadSubsetFn = syncRes?.loadSubset ?? null - -// Validation: on-demand REQUIRES loadSubset (lines 232-237) -if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) { - throw new CollectionConfigurationError(...) -} -``` - -**3. Subscriptions call it** (CollectionSubscription): -```typescript -// packages/db/src/collection/subscription.ts - -requestSnapshot() { - // line 366 - const syncResult = this.collection._sync.loadSubset({ - where: ..., - orderBy: ..., - limit: ..., - subscription: this, - }) -} - -requestLimitedSnapshot() { - // line 595 - for paginated queries - const syncResult = this.collection._sync.loadSubset({ - where, limit, orderBy, cursor, offset, subscription - }) -} -``` - -### Key Files in TanStack DB - -| File | Purpose | -|------|---------| -| `collection/sync.ts` | Stores loadSubset, validates on-demand config, exposes `loadSubset()` method | -| `collection/subscription.ts` | Calls loadSubset when queries need data, tracks loaded subsets for unload | -| `types.ts` | Defines `LoadSubsetOptions`, `SyncConfigRes`, `SyncMode` | -| `query/subset-dedupe.ts` | `DeduplicatedLoadSubset` class for preventing redundant loads | - ---- - -## Key Considerations - -### 1. Diff Triggers in On-Demand Mode -- Should continue firing for ALL changes -- TanStack filters by subscription predicates -- Ensures changes to loaded data propagate correctly - -### 2. 
Memory Management -- `unloadSubset` can be no-op initially -- Data stays in SQLite (source of truth) -- TanStack handles collection memory via gcTime/subscription lifecycle - -### 3. Deduplication -- Use TanStack's `DeduplicatedLoadSubset` class -- Handles overlapping queries efficiently -- Converts redundant inserts to updates - -### 4. Error Handling -- Log errors, don't block the app -- Keep collection in ready state -- Return empty/partial results on failure - ---- - -## Sources - -- [TanStack DB 0.5: Query-Driven Sync](https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync) -- [TanStack DB Query Collection Docs](https://tanstack.com/db/latest/docs/collections/query-collection) -- [RFC: On-Demand Collection Loading via loadSubset](https://github.com/TanStack/db/discussions/676) -- [Electric + TanStack DB Integration](https://electric-sql.com/blog/2025/07/29/local-first-sync-with-tanstack-db) diff --git a/packages/powersync-db-collection/research/writeup.md b/packages/powersync-db-collection/research/writeup.md deleted file mode 100644 index c5217f670..000000000 --- a/packages/powersync-db-collection/research/writeup.md +++ /dev/null @@ -1,103 +0,0 @@ -# Query Driven Sync for PowerSync + TanstackDB - -## Background - -TanstackDB supports a collection option called "sync mode" which defines which strategy to use when syncing data to the TanstackDB collection. - -Note in the context of PowerSync and TanstackDB, there are two distinct definitions for "sync". For PowerSync, sync means syncing between the local PowerSync SQLite database and the remote sync service/backend. For TanstackDB, sync means syncing data from your local data source (in this case PowerSync's SQLite database) and the in-memory TanstackDB collection. - -TanstackDB describe its sync mode options as: --------------------- -Eager mode (default): Loads entire collection upfront. Best for <10k rows of mostly static data like user preferences or small reference tables. 
-- On-demand mode (aka query driven sync): Loads only what queries request. Best for large datasets (>50k rows), search interfaces, and catalogs where most data won't be accessed. -- Progressive mode: Loads query subset immediately, syncs full dataset in background. Best for collaborative apps needing instant first paint AND sub-millisecond queries. --------------------- - -The initial PowerSync-TanstackDB integration supports "Eager mode", effectively mirroring all the data from a PowerSync SQLite table to the collection. -We are interested in adding support for the on-demand/query driven mode. - -## Supporting On-Demand (Query Driven) Sync - -Query-driven sync works by evaluating queries against the given TanstackDB collection. The collection automatically pushes down query predicates (where clauses, orderBy, limit, and offset) to the collection's `queryFn`. This allows you to fetch only the data needed for each specific query, rather than fetching the entire dataset. - -This option could be opted in by passing the `syncMode: 'on-demand'` option to `powerSyncCollectionOptions`. We wouldn't expose the `queryFn` on this level as we should be able to resolve queryFn logic internally. 
- -An example pulled from their docs: - -``` -const productsCollection = createCollection( - queryCollectionOptions({ - syncMode: 'on-demand', // Enable predicate push-down - - queryFn: async (ctx) => { - const { limit, offset, where, orderBy } = ctx.meta.loadSubsetOptions - - // Build query parameters from parsed filters - const params = new URLSearchParams() - - // Add filters - const parsed = parseLoadSubsetOptions({ where, orderBy, limit }) - - // Add filters - parsed.filters.forEach(({ field, operator, value }) => { - const fieldName = field.join('.') - if (operator === 'eq') { - params.set(fieldName, String(value)) - } else if (operator === 'lt') { - params.set(`${fieldName}_lt`, String(value)) - } else if (operator === 'gt') { - params.set(`${fieldName}_gt`, String(value)) - } - }) - // Add sorting - // Add limit - // Add offset for pagination - - // or some local query against a sqlite db - const response = await fetch(`/api/products?${params}`) - return response.json() - }, - }) - ``` - -> Add PoC of mechanism/implementation/understanding/draw the rest of the owl - - -## Ramblings -### Using triggers -Remember that db triggers don't work on views, and if you need to work against underlying tables json_extract will be needed - relevant when processing the subset loading. - -### Query cursor support -The electric-db-collection package supports cursor-based pagination which I believe ties in with query driven-sync. We should be able to support this by passing the cursor expressions to the `queryFn` and let the `queryFn` handle the pagination. - -### Live queries -Need to investigate how live queries work with query-driven sync. Do we need to do any additional work on top of the existing triggers? - -### Error handling -Need to investigate how error handling works with query-driven sync. Do we need to do any additional work on top of the existing error handling? 
- -## Optimising remote sync -Similar thoughts/suggestions were raised by Simon, Steven, and Kobie - for the second part of this work. -Query-driven sync optimizes the data loaded from your local SQLite database to the collection, but that doesn't change how much data is loaded into the database. A possible optimization that could be useful for both `eager` and `on-demand` modes is tying in sync streams/sync stream parameters to limit the amount of data loaded to the database. - - -Paraphrasing Simon: -There might be a way to subscribe in PowerSync to sync streams with parameters depending on TanStackDB filters. Say I have a stream defined as `SELECT * FROM notes WHERE owner = request.user_id() AND project_id = subscription.parameter('project')`. It would be really cool if: -``` -const { data: todos } = useLiveQuery((q) => - q.from({ note: notesCollection }).where(({ note }) => note.project_id = 'my_project_id') -) -``` -could somehow realize that the filter correlates with a subscription parameter and auto-subscribe with `db.syncStream('name', {project_id: 'my_project_id'})`. - -And if you had two of the `useLiveQuery()` hooks like above filtering for different project ids, sync streams just let you do two subscriptions. - -Initial unknown is how you map collections -> tables -> sync stream names. Which will be quite complicated. The service would have to generate that information by looking at all sync streams to generate this pattern. - -Doing that in `queryFn` by analyzing `where` seems like a clean way to go about this though. Especially since we can await `waitForFirstSync()` on the stream subscription we're creating to show a loading state until the subset of data requested has actually been synced, that should nicely map to what TSDB expects. - -So if we listed all the possible strategies (ignoring the progressive mode): -1. On-demand with sync streams incorporated -2. Eager with sync streams incorporated -3. On-demand -4. 
Eager \ No newline at end of file From 0cb33cae600bccd5529d82509c4439578bc13112 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 10 Feb 2026 11:42:06 +0200 Subject: [PATCH 09/15] Overlap test. --- .../powersync-db-collection/src/powersync.ts | 7 +- .../tests/on-demand-sync.test.ts | 96 +++++++++++++++++++ 2 files changed, 98 insertions(+), 5 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 6c947fcb1..24ea5fcd5 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -308,7 +308,6 @@ export function powerSyncCollectionOptions< /** * Loads a subset of data from SQLite based on the provided options. - * Called by TanStack DB when a query needs data in on-demand mode. */ const loadSubsetImpl = async ( options: LoadSubsetOptions, @@ -316,7 +315,7 @@ export function powerSyncCollectionOptions< activeWhereExpressions.push(options.where) try { const compiled = compileSQLite(options) - + // Build the SELECT query let sql = `SELECT * FROM ${viewName}` const queryParams: Array = [...compiled.params] @@ -339,10 +338,8 @@ export function powerSyncCollectionOptions< queryParams, ) - // Execute query against PowerSync SQLite const rows = await database.getAll(sql, queryParams) - // Write rows to TanStack collection begin() for (const row of rows) { write({ @@ -356,7 +353,6 @@ export function powerSyncCollectionOptions< `loadSubset loaded ${rows.length} rows for ${viewName}`, ) } catch (error) { - // Log error but don't throw - keeps collection in ready state database.logger.error( `loadSubset failed for ${viewName}`, error, @@ -409,6 +405,7 @@ export function powerSyncCollectionOptions< { where: combinedWhere }, { jsonColumn: 'value' }, ) + // Also filter UPDATEs where the previous value matched — this catches // rows moving OUT of a predicate's scope (e.g. 
category changed from // 'electronics' to 'clothing' while filtering for 'electronics') diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index db4691d57..ac4258fe5 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -1331,4 +1331,100 @@ describe(`On-Demand Sync Mode`, () => { ) }) }) + + describe(`Overlapping data across queries`, () => { + it(`should deduplicate rows when multiple live queries load the same data`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // LQ1: electronics category — matches A(50), B(150), D(200) + const electronicsQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => electronicsQuery.cleanup()) + + await electronicsQuery.preload() + + await vi.waitFor( + () => { + expect(electronicsQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + // LQ2: price > 100 — matches B(150), D(200) + // Products B and D overlap with LQ1 + const expensiveQuery = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }), + ) + onTestFinished(() => expensiveQuery.cleanup()) + + await 
expensiveQuery.preload() + + await vi.waitFor( + () => { + expect(expensiveQuery.size).toBe(2) + }, + { timeout: 2000 }, + ) + + // Both loadSubset calls inserted rows B and D — base collection should have no duplicates + // Union of both subsets: A, B, D (B and D are shared) + const baseNames = collection.toArray.map((p: any) => p.name).sort() + expect(baseNames).toEqual([`Product A`, `Product B`, `Product D`]) + + // Both live queries return correct results over the shared data + const electronicsNames = electronicsQuery.toArray.map((p) => p.name).sort() + expect(electronicsNames).toEqual([`Product A`, `Product B`, `Product D`]) + + const expensiveNames = expensiveQuery.toArray.map((p) => p.name).sort() + expect(expensiveNames).toEqual([`Product B`, `Product D`]) + + // Update a shared row — both queries should see the change + const productB = expensiveQuery.toArray.find((p) => p.name === `Product B`) + await db.execute(`UPDATE products SET price = 175 WHERE id = ?`, [productB!.id]) + + await vi.waitFor( + () => { + const inElectronics = electronicsQuery.toArray.find((p) => p.name === `Product B`) + const inExpensive = expensiveQuery.toArray.find((p) => p.name === `Product B`) + expect(inElectronics?.price).toBe(175) + expect(inExpensive?.price).toBe(175) + }, + { timeout: 2000 }, + ) + }) + }) }) From 841fd866b7e4150bfd73fd93a740d1af7a4e96ac Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 10 Feb 2026 14:04:09 +0200 Subject: [PATCH 10/15] Formatting. 
--- .../powersync-db-collection/src/powersync.ts | 27 +- .../src/sqlite-compiler.ts | 15 +- .../tests/on-demand-sync.test.ts | 865 +++++++++--------- .../tests/sqlite-compiler.test.ts | 7 +- 4 files changed, 449 insertions(+), 465 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 24ea5fcd5..5d206d4f3 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -315,7 +315,7 @@ export function powerSyncCollectionOptions< activeWhereExpressions.push(options.where) try { const compiled = compileSQLite(options) - + // Build the SELECT query let sql = `SELECT * FROM ${viewName}` const queryParams: Array = [...compiled.params] @@ -353,10 +353,7 @@ export function powerSyncCollectionOptions< `loadSubset loaded ${rows.length} rows for ${viewName}`, ) } catch (error) { - database.logger.error( - `loadSubset failed for ${viewName}`, - error, - ) + database.logger.error(`loadSubset failed for ${viewName}`, error) } } @@ -433,7 +430,7 @@ export function powerSyncCollectionOptions< selectSQL, selectParams, ) - + const pendingOperations: Array = [] for (const op of operations) { @@ -493,14 +490,14 @@ export function powerSyncCollectionOptions< }, hooks: { beforeCreate: async (context) => { - if (syncMode === 'on-demand') { - // On-demand: skip initial load, mark ready immediately + if (syncMode === 'on-demand') { + // On-demand: skip initial load, mark ready immediately markReady() database.logger.info( `Sync is ready for ${viewName} into ${trackedTableName} in on-demand mode`, - ) - return - } + ) + return + } let currentBatchCount = syncBatchSize let cursor = 0 @@ -556,11 +553,9 @@ export function powerSyncCollectionOptions< ) abortController.abort() }, - loadSubset: - syncMode === `on-demand` ? loadSubsetImpl : undefined, - unloadSubset: - syncMode === `on-demand` ? 
unloadSubsetImpl : undefined, - } + loadSubset: syncMode === `on-demand` ? loadSubsetImpl : undefined, + unloadSubset: syncMode === `on-demand` ? unloadSubsetImpl : undefined, + } }, // Expose the getSyncMetadata function getSyncMetadata: undefined, diff --git a/packages/powersync-db-collection/src/sqlite-compiler.ts b/packages/powersync-db-collection/src/sqlite-compiler.ts index 9305bc05e..ee96f258f 100644 --- a/packages/powersync-db-collection/src/sqlite-compiler.ts +++ b/packages/powersync-db-collection/src/sqlite-compiler.ts @@ -42,7 +42,10 @@ export interface CompileSQLiteOptions { * // Result: { where: '"price" > ?', orderBy: '"price" DESC', limit: 50, params: [100] } * ``` */ -export function compileSQLite(options: LoadSubsetOptions, compileOptions?: CompileSQLiteOptions): SQLiteCompiledQuery { +export function compileSQLite( + options: LoadSubsetOptions, + compileOptions?: CompileSQLiteOptions, +): SQLiteCompiledQuery { const { where, orderBy, limit } = options const params: Array = [] @@ -199,7 +202,9 @@ function compileFunction( throw new Error(`${name} expects at least 2 arguments`) } const opKeyword = name === `and` ? 
`AND` : `OR` - return compiledArgs.map((arg: string) => `(${arg})`).join(` ${opKeyword} `) + return compiledArgs + .map((arg: string) => `(${arg})`) + .join(` ${opKeyword} `) } case `not`: { @@ -210,7 +215,11 @@ function compileFunction( const arg = args[0] if (arg && arg.type === `func`) { if (arg.name === `isNull` || arg.name === `isUndefined`) { - const innerArg = compileExpression(arg.args[0]!, params, compileOptions) + const innerArg = compileExpression( + arg.args[0]!, + params, + compileOptions, + ) return `${innerArg} IS NOT NULL` } } diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index ac4258fe5..f41375544 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -4,10 +4,10 @@ import { PowerSyncDatabase, Schema, Table, column } from '@powersync/node' import { and, createCollection, + createLiveQueryCollection, eq, gt, gte, - liveQueryCollectionOptions, lt, or, } from '@tanstack/db' @@ -74,7 +74,7 @@ describe(`On-Demand Sync Mode`, () => { // Wait for collection to be ready await collection.stateWhenReady() - + // Verify NO data was loaded into the collection expect(collection.size).toBe(0) }) @@ -99,23 +99,19 @@ describe(`On-Demand Sync Mode`, () => { expect(collection.size).toBe(0) // Create a live query that filters for electronics over $100 - const expensiveElectronics = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => - eq(product.category, `electronics`), - ) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveElectronics = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => 
eq(product.category, `electronics`)) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => expensiveElectronics.cleanup()) // Preload triggers the live query to request data via loadSubset @@ -158,21 +154,20 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Create a live query that filters for electronics over $100 - const expensiveElectronics = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveElectronics = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => expensiveElectronics.cleanup()) // Preload triggers the live query to request data via loadSubset @@ -210,7 +205,9 @@ describe(`On-Demand Sync Mode`, () => { expect(names).toEqual([`Product B`, `Product D`, `Product F`]) // Verify the new product's price - const productF = expensiveElectronics.toArray.find((p) => p.name === `Product F`) + const productF = expensiveElectronics.toArray.find( + (p) => p.name === `Product F`, + ) expect(productF?.price).toBe(300) }) @@ -231,21 +228,19 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Create a live query that filters for electronics over $100 - const expensiveElectronics = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - 
.from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveElectronics = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => expensiveElectronics.cleanup()) // Preload triggers the live query to request data via loadSubset @@ -293,7 +288,7 @@ describe(`On-Demand Sync Mode`, () => { it(`should handle multiple live queries without losing predicate coverage`, async () => { const db = await createDatabase() await createTestProducts(db) - + // Create collection with on-demand sync mode const collection = createCollection( powerSyncCollectionOptions({ @@ -303,28 +298,26 @@ describe(`On-Demand Sync Mode`, () => { }), ) onTestFinished(() => collection.cleanup()) - + await collection.stateWhenReady() - + // LQ1: electronics category - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) - + await 
electronicsQuery.preload() - + await vi.waitFor( () => { // Products A(50), B(150), D(200) are electronics @@ -332,26 +325,25 @@ describe(`On-Demand Sync Mode`, () => { }, { timeout: 2000 }, ) - + // LQ2: price > 100 (different predicate on same collection) - const expensiveQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => expensiveQuery.cleanup()) - + await expensiveQuery.preload() - + await vi.waitFor( () => { // Products B(150) and D(200) have price > 100 @@ -359,13 +351,13 @@ describe(`On-Demand Sync Mode`, () => { }, { timeout: 2000 }, ) - + // Now insert a new product that matches LQ1 (electronics) but NOT LQ2 (price <= 100) await db.execute(` INSERT INTO products (id, name, price, category) VALUES (uuid(), 'Cheap Gadget', 30, 'electronics') `) - + // The diff trigger should use the OR of both active predicates: // (category = 'electronics') OR (price > 100) // 'Cheap Gadget' (electronics, price=30) matches the first predicate, @@ -394,20 +386,18 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // LQ1: electronics category - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = 
createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -421,20 +411,19 @@ describe(`On-Demand Sync Mode`, () => { ) // LQ2: price > 100 - const expensiveQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => expensiveQuery.cleanup()) await expensiveQuery.preload() @@ -448,20 +437,19 @@ describe(`On-Demand Sync Mode`, () => { ) // LQ3: clothing category — a third predicate to exercise the 3-arg OR path - const clothingQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `clothing`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const clothingQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `clothing`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => clothingQuery.cleanup()) await clothingQuery.preload() @@ -509,20 
+497,19 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // LQ1: electronics category - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -535,20 +522,18 @@ describe(`On-Demand Sync Mode`, () => { ) // LQ2: clothing category - const clothingQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `clothing`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const clothingQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `clothing`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) await clothingQuery.preload() @@ -627,20 +612,19 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Query using lt — only products with price < 50: Product C (25) - const cheapQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => lt(product.price, 50)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: 
product.price, - category: product.category, - })), - }), - ) + const cheapQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => lt(product.price, 50)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => cheapQuery.cleanup()) await cheapQuery.preload() @@ -671,21 +655,19 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Top 2 most expensive products, ordered by price descending - const top2Query = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .orderBy(({ product }) => product.price, `desc`) - .limit(2) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const top2Query = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .orderBy(({ product }) => product.price, `desc`) + .limit(2) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => top2Query.cleanup()) await top2Query.preload() @@ -718,31 +700,26 @@ describe(`On-Demand Sync Mode`, () => { // Complex filter: (electronics AND price >= 150) OR (clothing AND price < 50) // Matches: Product B (electronics, 150), Product D (electronics, 200), Product C (clothing, 25) - const complexQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => - or( - and( - eq(product.category, `electronics`), - gte(product.price, 150), - ), - and( - eq(product.category, `clothing`), - lt(product.price, 50), - ), + const complexQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => + or( + and( + eq(product.category, 
`electronics`), + gte(product.price, 150), ), - ) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + and(eq(product.category, `clothing`), lt(product.price, 50)), + ), + ) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => complexQuery.cleanup()) await complexQuery.preload() @@ -773,20 +750,18 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Query for a category that doesn't exist — no matching rows - const emptyQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `furniture`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const emptyQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `furniture`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => emptyQuery.cleanup()) await emptyQuery.preload() @@ -814,20 +789,18 @@ describe(`On-Demand Sync Mode`, () => { onTestFinished(() => collection.cleanup()) await collection.stateWhenReady() - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ 
+ id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -841,12 +814,18 @@ describe(`On-Demand Sync Mode`, () => { ) // Update Product A's price — still electronics, still matches - const productA = electronicsQuery.toArray.find((p) => p.name === `Product A`) - await db.execute(`UPDATE products SET price = 99 WHERE id = ?`, [productA!.id]) + const productA = electronicsQuery.toArray.find( + (p) => p.name === `Product A`, + ) + await db.execute(`UPDATE products SET price = 99 WHERE id = ?`, [ + productA!.id, + ]) await vi.waitFor( () => { - const updated = electronicsQuery.toArray.find((p) => p.name === `Product A`) + const updated = electronicsQuery.toArray.find( + (p) => p.name === `Product A`, + ) expect(updated?.price).toBe(99) }, { timeout: 2000 }, @@ -870,20 +849,18 @@ describe(`On-Demand Sync Mode`, () => { onTestFinished(() => collection.cleanup()) await collection.stateWhenReady() - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -896,8 +873,13 @@ describe(`On-Demand Sync Mode`, () => { ) // Change Product A from electronics to clothing — no longer matches - const productA = electronicsQuery.toArray.find((p) => p.name === `Product A`) - await db.execute(`UPDATE 
products SET category = 'clothing' WHERE id = ?`, [productA!.id]) + const productA = electronicsQuery.toArray.find( + (p) => p.name === `Product A`, + ) + await db.execute( + `UPDATE products SET category = 'clothing' WHERE id = ?`, + [productA!.id], + ) await vi.waitFor( () => { @@ -924,20 +906,18 @@ describe(`On-Demand Sync Mode`, () => { onTestFinished(() => collection.cleanup()) await collection.stateWhenReady() - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -955,7 +935,10 @@ describe(`On-Demand Sync Mode`, () => { const productC = await db.get<{ id: string }>( `SELECT id FROM products WHERE name = 'Product C'`, ) - await db.execute(`UPDATE products SET category = 'electronics' WHERE id = ?`, [productC.id]) + await db.execute( + `UPDATE products SET category = 'electronics' WHERE id = ?`, + [productC.id], + ) await vi.waitFor( () => { @@ -965,7 +948,12 @@ describe(`On-Demand Sync Mode`, () => { ) const names = electronicsQuery.toArray.map((p) => p.name).sort() - expect(names).toEqual([`Product A`, `Product B`, `Product C`, `Product D`]) + expect(names).toEqual([ + `Product A`, + `Product B`, + `Product C`, + `Product D`, + ]) }) it(`should handle DELETE of a matching row`, async () => { @@ -982,20 +970,18 @@ describe(`On-Demand Sync Mode`, () => { onTestFinished(() => collection.cleanup()) await 
collection.stateWhenReady() - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -1008,7 +994,9 @@ describe(`On-Demand Sync Mode`, () => { ) // Delete Product A - const productA = electronicsQuery.toArray.find((p) => p.name === `Product A`) + const productA = electronicsQuery.toArray.find( + (p) => p.name === `Product A`, + ) await db.execute(`DELETE FROM products WHERE id = ?`, [productA!.id]) await vi.waitFor( @@ -1040,29 +1028,7 @@ describe(`On-Demand Sync Mode`, () => { // Rapidly create and destroy 5 live queries for (let i = 0; i < 5; i++) { - const query = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) - query.cleanup() - } - - // Give time for any async cleanup to settle - await new Promise((resolve) => setTimeout(resolve, 200)) - - // Collection should still be functional — create one more and verify it works - const finalQuery = createCollection( - liveQueryCollectionOptions({ + const query = createLiveQueryCollection({ query: (q) => q .from({ product: collection }) @@ -1073,8 +1039,26 @@ describe(`On-Demand Sync Mode`, () => { price: 
product.price, category: product.category, })), - }), - ) + }) + query.cleanup() + } + + // Give time for any async cleanup to settle + await new Promise((resolve) => setTimeout(resolve, 200)) + + // Collection should still be functional — create one more and verify it works + const finalQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => finalQuery.cleanup()) await finalQuery.preload() @@ -1102,20 +1086,18 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Create first query - const query1 = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const query1 = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) await query1.preload() @@ -1132,20 +1114,18 @@ describe(`On-Demand Sync Mode`, () => { await new Promise((resolve) => setTimeout(resolve, 100)) // Re-create with same predicate - const query2 = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const query2 = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ 
product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => query2.cleanup()) await query2.preload() @@ -1188,19 +1168,15 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Query with no WHERE — selects all products - const allQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const allQuery = createLiveQueryCollection({ + query: (q) => + q.from({ product: collection }).select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => allQuery.cleanup()) await allQuery.preload() @@ -1227,20 +1203,18 @@ describe(`On-Demand Sync Mode`, () => { onTestFinished(() => collection.cleanup()) await collection.stateWhenReady() - const emptyQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `furniture`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const emptyQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `furniture`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => emptyQuery.cleanup()) await emptyQuery.preload() @@ -1266,52 +1240,47 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // Create three queries but don't await preload individually - const electronicsQuery = createCollection( - 
liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => electronicsQuery.cleanup()) - const clothingQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `clothing`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const clothingQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `clothing`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) onTestFinished(() => clothingQuery.cleanup()) - const expensiveQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => expensiveQuery.cleanup()) // Preload all 
concurrently @@ -1348,20 +1317,19 @@ describe(`On-Demand Sync Mode`, () => { await collection.stateWhenReady() // LQ1: electronics category — matches A(50), B(150), D(200) - const electronicsQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => eq(product.category, `electronics`)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const electronicsQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => electronicsQuery.cleanup()) await electronicsQuery.preload() @@ -1375,20 +1343,19 @@ describe(`On-Demand Sync Mode`, () => { // LQ2: price > 100 — matches B(150), D(200) // Products B and D overlap with LQ1 - const expensiveQuery = createCollection( - liveQueryCollectionOptions({ - query: (q) => - q - .from({ product: collection }) - .where(({ product }) => gt(product.price, 100)) - .select(({ product }) => ({ - id: product.id, - name: product.name, - price: product.price, - category: product.category, - })), - }), - ) + const expensiveQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection }) + .where(({ product }) => gt(product.price, 100)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => expensiveQuery.cleanup()) await expensiveQuery.preload() @@ -1406,20 +1373,30 @@ describe(`On-Demand Sync Mode`, () => { expect(baseNames).toEqual([`Product A`, `Product B`, `Product D`]) // Both live queries return correct results over the shared data - const electronicsNames = electronicsQuery.toArray.map((p) => 
p.name).sort() + const electronicsNames = electronicsQuery.toArray + .map((p) => p.name) + .sort() expect(electronicsNames).toEqual([`Product A`, `Product B`, `Product D`]) const expensiveNames = expensiveQuery.toArray.map((p) => p.name).sort() expect(expensiveNames).toEqual([`Product B`, `Product D`]) // Update a shared row — both queries should see the change - const productB = expensiveQuery.toArray.find((p) => p.name === `Product B`) - await db.execute(`UPDATE products SET price = 175 WHERE id = ?`, [productB!.id]) + const productB = expensiveQuery.toArray.find( + (p) => p.name === `Product B`, + ) + await db.execute(`UPDATE products SET price = 175 WHERE id = ?`, [ + productB!.id, + ]) await vi.waitFor( () => { - const inElectronics = electronicsQuery.toArray.find((p) => p.name === `Product B`) - const inExpensive = expensiveQuery.toArray.find((p) => p.name === `Product B`) + const inElectronics = electronicsQuery.toArray.find( + (p) => p.name === `Product B`, + ) + const inExpensive = expensiveQuery.toArray.find( + (p) => p.name === `Product B`, + ) expect(inElectronics?.price).toBe(175) expect(inExpensive?.price).toBe(175) }, diff --git a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts index 606096b37..5aa5ee36e 100644 --- a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts +++ b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts @@ -5,7 +5,8 @@ import { compileSQLite } from '../src/sqlite-compiler' const val = (value: T) => new IR.Value(value) // Helper to create expression nodes const ref = (path: Array) => new IR.PropRef(path) - const func = (name: string, args: Array) => new IR.Func(name, args) +const func = (name: string, args: Array) => + new IR.Func(name, args) describe(`SQLite Compiler`, () => { describe(`where clause compilation`, () => { @@ -239,7 +240,9 @@ describe(`SQLite Compiler`, () => { ], }) - expect(result.orderBy).toBe(`"category" NULLS 
LAST, "price" DESC NULLS LAST`) + expect(result.orderBy).toBe( + `"category" NULLS LAST, "price" DESC NULLS LAST`, + ) }) }) From fabfe738a7279ff859a1fe835d97cf8d3ce5859c Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Wed, 11 Feb 2026 16:30:19 +0200 Subject: [PATCH 11/15] Reordered PoC, testing on-demand. --- packages/powersync-db-collection/package.json | 1 + .../powersync-db-collection/src/powersync.ts | 325 +++++++++--------- .../src/sqlite-compiler.ts | 2 +- .../tests/on-demand-sync.test.ts | 80 +++++ 4 files changed, 245 insertions(+), 163 deletions(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 587b3298c..0f763b3cf 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -55,6 +55,7 @@ "@standard-schema/spec": "^1.1.0", "@tanstack/db": "workspace:*", "@tanstack/store": "^0.8.0", + "async-mutex": "^0.5.0", "debug": "^4.4.3", "p-defer": "^4.0.1" }, diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 5d206d4f3..39c6070e2 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -1,5 +1,10 @@ -import { or } from '@tanstack/db' +import { Mutex } from 'async-mutex' import { DiffTriggerOperation, sanitizeSQL } from '@powersync/common' +import { + or + + +} from '@tanstack/db' import { PendingOperationStore } from './PendingOperationStore' import { PowerSyncTransactor } from './PowerSyncTransactor' import { DEFAULT_BATCH_SIZE } from './definitions' @@ -7,6 +12,8 @@ import { asPowerSyncRecord, mapOperation } from './helpers' import { convertTableToSchema } from './schema' import { serializeForSQLite } from './serialization' import { compileSQLite } from './sqlite-compiler' +import type {LoadSubsetOptions, OperationType, + SyncConfig} from '@tanstack/db'; import type { AnyTableColumnType, ExtractedTable, 
@@ -26,7 +33,6 @@ import type { PowerSyncCollectionUtils, } from './definitions' import type { PendingOperation } from './PendingOperationStore' -import type { LoadSubsetOptions, SyncConfig } from '@tanstack/db' import type { StandardSchemaV1 } from '@standard-schema/spec' import type { Table, TriggerDiffRecord } from '@powersync/common' @@ -299,70 +305,15 @@ export function powerSyncCollectionOptions< */ const sync: SyncConfig = { sync: (params) => { - const { begin, write, commit, markReady } = params + const { begin, write, collection, commit, markReady } = params const abortController = new AbortController() - // Tracks all active WHERE expressions for on-demand sync filtering. // Each loadSubset call pushes its predicate; unloadSubset removes it by reference. const activeWhereExpressions: Array = [] - - /** - * Loads a subset of data from SQLite based on the provided options. - */ - const loadSubsetImpl = async ( - options: LoadSubsetOptions, - ): Promise => { - activeWhereExpressions.push(options.where) - try { - const compiled = compileSQLite(options) - - // Build the SELECT query - let sql = `SELECT * FROM ${viewName}` - const queryParams: Array = [...compiled.params] - - if (compiled.where) { - sql += ` WHERE ${compiled.where}` - } - - if (compiled.orderBy) { - sql += ` ORDER BY ${compiled.orderBy}` - } - - if (compiled.limit !== undefined) { - sql += ` LIMIT ?` - queryParams.push(compiled.limit) - } - - database.logger.debug?.( - `loadSubset for ${viewName}: ${sql}`, - queryParams, - ) - - const rows = await database.getAll(sql, queryParams) - - begin() - for (const row of rows) { - write({ - type: `insert`, - value: deserializeSyncRow(row), - }) - } - commit() - - database.logger.debug?.( - `loadSubset loaded ${rows.length} rows for ${viewName}`, - ) - } catch (error) { - database.logger.error(`loadSubset failed for ${viewName}`, error) - } - } - - const unloadSubsetImpl = (options: LoadSubsetOptions): void => { - const idx = 
activeWhereExpressions.indexOf(options.where) - if (idx !== -1) { - activeWhereExpressions.splice(idx, 1) - } - } + // Track pending cleanup operations to prevent operations on closed connections + + let currentDisposeTracking: (() => Promise) | null = null + const mutex = new Mutex() // The sync function needs to be synchronous async function start() { @@ -375,62 +326,9 @@ export function powerSyncCollectionOptions< await database .writeTransaction(async (context) => { begin() - - let selectSQL = `SELECT * FROM ${trackedTableName}` - let selectParams: Array = [] - - if (syncMode === 'on-demand') { - if (activeWhereExpressions.length === 0) { - // No active query yet — clear tracked table and skip - await context.execute(`DELETE FROM ${trackedTableName}`) - commit() - return - } - - // Combine all active predicates with OR - const combinedWhere = - activeWhereExpressions.length === 1 - ? activeWhereExpressions[0] - : or( - activeWhereExpressions[0]!, - activeWhereExpressions[1]!, - ...activeWhereExpressions.slice(2), - ) - - // Filter operations where the new value matches any active predicate - const compiled = compileSQLite( - { where: combinedWhere }, - { jsonColumn: 'value' }, - ) - - // Also filter UPDATEs where the previous value matched — this catches - // rows moving OUT of a predicate's scope (e.g. category changed from - // 'electronics' to 'clothing' while filtering for 'electronics') - const compiledPrev = compileSQLite( - { where: combinedWhere }, - { jsonColumn: 'previous_value' }, - ) - if (compiled.where) { - selectSQL += ` WHERE (${compiled.where})` - selectParams = [...compiled.params] - - if (compiledPrev.where) { - selectSQL += ` OR (operation = ? 
AND ${compiledPrev.where})` - selectParams.push( - DiffTriggerOperation.UPDATE, - ...compiledPrev.params, - ) - } - } - } - - selectSQL += ` ORDER BY timestamp ASC` - const operations = await context.getAll( - selectSQL, - selectParams, + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`, ) - const pendingOperations: Array = [] for (const op of operations) { @@ -480,65 +378,168 @@ export function powerSyncCollectionOptions< }, ) - const disposeTracking = await database.triggers.createDiffTrigger({ - source: viewName, - destination: trackedTableName, - when: { - [DiffTriggerOperation.INSERT]: `TRUE`, - [DiffTriggerOperation.UPDATE]: `TRUE`, - [DiffTriggerOperation.DELETE]: `TRUE`, - }, - hooks: { - beforeCreate: async (context) => { - if (syncMode === 'on-demand') { - // On-demand: skip initial load, mark ready immediately - markReady() - database.logger.info( - `Sync is ready for ${viewName} into ${trackedTableName} in on-demand mode`, - ) - return - } - - let currentBatchCount = syncBatchSize - let cursor = 0 - while (currentBatchCount == syncBatchSize) { - begin() - const batchItems = await context.getAll( - sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? 
OFFSET ?`, - [syncBatchSize, cursor], - ) - currentBatchCount = batchItems.length - cursor += currentBatchCount - for (const row of batchItems) { - write({ - type: `insert`, - value: deserializeSyncRow(row), - }) - } - commit() - } - markReady() - database.logger.info( - `Sync is ready for ${viewName} into ${trackedTableName}`, - ) - }, - }, - }) + markReady() // If the abort controller was aborted while processing the request above if (abortController.signal.aborted) { - await disposeTracking() + await currentDisposeTracking?.() } else { abortController.signal.addEventListener( `abort`, () => { - disposeTracking() + currentDisposeTracking?.() }, { once: true }, ) } } + const loadSubsetImplInner = async ( + options?: LoadSubsetOptions, + ): Promise => { + + if (options) { + activeWhereExpressions.push(options.where) + } + + if (activeWhereExpressions.length === 0) { + await currentDisposeTracking?.() + currentDisposeTracking = null + return + } + + const combinedWhere = + activeWhereExpressions.length === 1 + ? activeWhereExpressions[0] + : or( + activeWhereExpressions[0]!, + activeWhereExpressions[1]!, + ...activeWhereExpressions.slice(2), + ) + + // Compile three variants of the WHERE clause: + // 1. For INSERT triggers: column refs use NEW.data + const compiledInsert = compileSQLite( + { where: combinedWhere }, + { jsonColumn: 'NEW.data' }, + ) + // 2. For DELETE triggers: column refs use OLD.data + const compiledDelete = compileSQLite( + { where: combinedWhere }, + { jsonColumn: 'OLD.data' }, + ) + // 3. 
For view-level SELECT: columns are direct (no jsonColumn) + const compiledView = compileSQLite({ where: combinedWhere }) + + const toSanitized = (compiled: { + where?: string + params: Array + }): string => { + if (!compiled.where) return 'TRUE' + const sqlParts = compiled.where.split('?') + return sanitizeSQL( + sqlParts as unknown as TemplateStringsArray, + ...compiled.params, + ) + } + + const insertWhenClause = toSanitized(compiledInsert) + const deleteWhenClause = toSanitized(compiledDelete) + const viewWhereClause = toSanitized(compiledView) + + await currentDisposeTracking?.() + + try { + currentDisposeTracking = await database.triggers.createDiffTrigger({ + source: viewName, + destination: trackedTableName, + when: { + [DiffTriggerOperation.INSERT]: insertWhenClause, + [DiffTriggerOperation.UPDATE]: `(${insertWhenClause}) OR (${deleteWhenClause})`, + [DiffTriggerOperation.DELETE]: deleteWhenClause, + }, + hooks: { + beforeCreate: async (context) => { + let currentBatchCount = syncBatchSize + let cursor = 0 + while (currentBatchCount == syncBatchSize) { + begin() + + const batchItems = await context.getAll( + `SELECT * FROM ${viewName} WHERE ${viewWhereClause} LIMIT ? OFFSET ?`, + [syncBatchSize, cursor], + ) + currentBatchCount = batchItems.length + cursor += currentBatchCount + for (const row of batchItems) { + const writeType: OperationType = collection.has(row.id) + ? 
`update` + : `insert` + write({ + type: writeType, + value: deserializeSyncRow(row), + }) + } + commit() + } + + // Only mark ready if not aborted to prevent invalid state transitions + if (!abortController.signal.aborted) { + markReady() + database.logger.info( + `Sync is ready for ${viewName} into ${trackedTableName}`, + ) + } + }, + }, + }) + } catch (error: any) { + // Ignore errors if cleanup was already initiated (closed remote) + if ( + !abortController.signal.aborted || + !error?.message?.includes('closed remote') + ) { + throw error + } + database.logger.debug( + 'Ignoring error during trigger creation after cleanup:', + error.message, + ) + } + } + + const loadSubsetImpl = (options: LoadSubsetOptions): Promise => { + return mutex.runExclusive(() => loadSubsetImplInner(options)) + } + + const unloadSubsetImplInner = async (options: LoadSubsetOptions) => { + const idx = activeWhereExpressions.indexOf(options.where) + if (idx !== -1) { + activeWhereExpressions.splice(idx, 1) + } + + try { + await currentDisposeTracking?.() + await loadSubsetImplInner() + } catch (error: any) { + // Ignore errors if cleanup was already initiated (closed remote) + if ( + !abortController.signal.aborted || + !error?.message?.includes('closed remote') + ) { + throw error + } + database.logger.debug( + 'Ignoring error during unload after cleanup:', + error.message, + ) + } + } + + const unloadSubsetImpl = (options: LoadSubsetOptions): Promise => { + return mutex.runExclusive(() => unloadSubsetImplInner(options)) + } + start().catch((error) => database.logger.error( `Could not start syncing process for ${viewName} into ${trackedTableName}`, diff --git a/packages/powersync-db-collection/src/sqlite-compiler.ts b/packages/powersync-db-collection/src/sqlite-compiler.ts index ee96f258f..3aaab2886 100644 --- a/packages/powersync-db-collection/src/sqlite-compiler.ts +++ b/packages/powersync-db-collection/src/sqlite-compiler.ts @@ -96,7 +96,7 @@ function compileExpression( } const 
columnName = exp.path[0]! if (compileOptions?.jsonColumn && columnName !== `id`) { - return `json_extract(${quoteIdentifier(compileOptions.jsonColumn)}, '$.${columnName}')` + return `json_extract(${compileOptions.jsonColumn}, '$.${columnName}')` } return quoteIdentifier(columnName) } diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index f41375544..29b0dbc42 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -33,6 +33,9 @@ describe(`On-Demand Sync Mode`, () => { schema: APP_SCHEMA, }) onTestFinished(async () => { + // Wait a moment for any pending cleanup operations to complete + // before closing the database to prevent "operation on closed remote" errors + await new Promise((resolve) => setTimeout(resolve, 100)) await db.disconnectAndClear() await db.close() }) @@ -1403,5 +1406,82 @@ describe(`On-Demand Sync Mode`, () => { { timeout: 2000 }, ) }) + + it(`should handle changing a live query's predicate by replacing the collection`, async () => { + const db = await createDatabase() + await createTestProducts(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.products, + syncMode: `on-demand`, + }), + ) + onTestFinished(() => collection.cleanup()) + await collection.stateWhenReady() + + // Start with all products (no WHERE) + let liveQuery = createLiveQueryCollection({ + query: (q) => + q.from({ product: collection }).select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + + await liveQuery.preload() + + await vi.waitFor( + () => { + expect(liveQuery.size).toBe(5) + }, + { timeout: 2000 }, + ) + + // Switch to only electronics + liveQuery.cleanup() + + liveQuery = createLiveQueryCollection({ + query: (q) => + q + .from({ product: collection 
}) + .where(({ product }) => eq(product.category, `electronics`)) + .select(({ product }) => ({ + id: product.id, + name: product.name, + price: product.price, + category: product.category, + })), + }) + onTestFinished(() => liveQuery.cleanup()) + + await liveQuery.preload() + + await vi.waitFor( + () => { + expect(liveQuery.size).toBe(3) + }, + { timeout: 2000 }, + ) + + const names = liveQuery.toArray.map((p) => p.name).sort() + expect(names).toEqual([`Product A`, `Product B`, `Product D`]) + + // Verify reactive updates work on the new query + await db.execute(` + INSERT INTO products (id, name, price, category) + VALUES (uuid(), 'Product F', 99, 'electronics') + `) + + await vi.waitFor( + () => { + expect(liveQuery.size).toBe(4) + }, + { timeout: 2000 }, + ) + }) }) }) From 42cf91a1b754985c6801347530f2063dc5d72959 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 17 Feb 2026 12:54:20 +0200 Subject: [PATCH 12/15] Applying PoC to work alongside eager mode. --- .../powersync-db-collection/src/powersync.ts | 365 ++++++++++-------- 1 file changed, 197 insertions(+), 168 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 39c6070e2..6bb50a5b4 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -1,19 +1,14 @@ -import { Mutex } from 'async-mutex' import { DiffTriggerOperation, sanitizeSQL } from '@powersync/common' -import { - or - - -} from '@tanstack/db' +import { Mutex } from 'async-mutex' +import { or } from '@tanstack/db' +import { compileSQLite } from './sqlite-compiler' import { PendingOperationStore } from './PendingOperationStore' import { PowerSyncTransactor } from './PowerSyncTransactor' import { DEFAULT_BATCH_SIZE } from './definitions' import { asPowerSyncRecord, mapOperation } from './helpers' import { convertTableToSchema } from './schema' import { serializeForSQLite } from './serialization' 
-import { compileSQLite } from './sqlite-compiler' -import type {LoadSubsetOptions, OperationType, - SyncConfig} from '@tanstack/db'; +import type { LoadSubsetOptions, OperationType, SyncConfig } from '@tanstack/db' import type { AnyTableColumnType, ExtractedTable, @@ -34,7 +29,7 @@ import type { } from './definitions' import type { PendingOperation } from './PendingOperationStore' import type { StandardSchemaV1 } from '@standard-schema/spec' -import type { Table, TriggerDiffRecord } from '@powersync/common' +import type { LockContext, Table, TriggerDiffRecord } from '@powersync/common' /** * Creates PowerSync collection options for use with a standard Collection. @@ -233,7 +228,7 @@ export function powerSyncCollectionOptions< table, schema: inputSchema, syncBatchSize = DEFAULT_BATCH_SIZE, - syncMode = `eager`, + syncMode = 'eager', ...restConfig } = config @@ -307,16 +302,64 @@ export function powerSyncCollectionOptions< sync: (params) => { const { begin, write, collection, commit, markReady } = params const abortController = new AbortController() - // Tracks all active WHERE expressions for on-demand sync filtering. - // Each loadSubset call pushes its predicate; unloadSubset removes it by reference. 
- const activeWhereExpressions: Array = [] - // Track pending cleanup operations to prevent operations on closed connections - - let currentDisposeTracking: (() => Promise) | null = null - const mutex = new Mutex() - - // The sync function needs to be synchronous - async function start() { + + let disposeTracking: (() => Promise) | null = null + + if (syncMode === `eager`) { + return runEagerSync() + } else { + return runOnDemandSync() + } + + async function createDiffTrigger(options: { + when: Record + writeType: (rowId: string) => OperationType + batchQuery: ( + lockContext: LockContext, + batchSize: number, + cursor: number, + ) => Promise> + onReady: () => void + }) { + const { when, writeType, batchQuery, onReady } = options + + return await database.triggers.createDiffTrigger({ + source: viewName, + destination: trackedTableName, + when, + hooks: { + beforeCreate: async (context) => { + let currentBatchCount = syncBatchSize + let cursor = 0 + while (currentBatchCount == syncBatchSize) { + begin() + + const batchItems = await batchQuery( + context, + syncBatchSize, + cursor, + ) + currentBatchCount = batchItems.length + cursor += currentBatchCount + for (const row of batchItems) { + write({ + type: writeType(row.id), + value: deserializeSyncRow(row), + }) + } + commit() + } + onReady() + database.logger.info( + `Sync is ready for ${viewName} into ${trackedTableName}`, + ) + }, + }, + }) + } + + // The sync function needs to be synchronous. 
+ async function start(afterOnChangeRegistered?: () => Promise) { database.logger.info( `Sync is starting for ${viewName} into ${trackedTableName}`, ) @@ -378,184 +421,170 @@ export function powerSyncCollectionOptions< }, ) - markReady() + await afterOnChangeRegistered?.() // If the abort controller was aborted while processing the request above if (abortController.signal.aborted) { - await currentDisposeTracking?.() + await disposeTracking?.() } else { abortController.signal.addEventListener( `abort`, () => { - currentDisposeTracking?.() + disposeTracking?.() }, { once: true }, ) } } - const loadSubsetImplInner = async ( - options?: LoadSubsetOptions, - ): Promise => { - - if (options) { - activeWhereExpressions.push(options.where) - } + // Eager mode. + // Registers a diff trigger for the entire table. + function runEagerSync() { + start(async () => { + disposeTracking = await createDiffTrigger({ + when: { + [DiffTriggerOperation.INSERT]: `TRUE`, + [DiffTriggerOperation.UPDATE]: `TRUE`, + [DiffTriggerOperation.DELETE]: `TRUE`, + }, + writeType: (_rowId: string) => `insert`, + batchQuery: ( + lockContext: LockContext, + batchSize: number, + cursor: number, + ) => + lockContext.getAll( + sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? OFFSET ?`, + [batchSize, cursor], + ), + onReady: () => markReady(), + }) + }).catch((error) => + database.logger.error( + `Could not start syncing process for ${viewName} into ${trackedTableName}`, + error, + ), + ) - if (activeWhereExpressions.length === 0) { - await currentDisposeTracking?.() - currentDisposeTracking = null - return + return () => { + database.logger.info( + `Sync has been stopped for ${viewName} into ${trackedTableName}`, + ) + abortController.abort() } + } - const combinedWhere = - activeWhereExpressions.length === 1 - ? activeWhereExpressions[0] - : or( - activeWhereExpressions[0]!, - activeWhereExpressions[1]!, - ...activeWhereExpressions.slice(2), - ) - - // Compile three variants of the WHERE clause: - // 1. 
For INSERT triggers: column refs use NEW.data - const compiledInsert = compileSQLite( - { where: combinedWhere }, - { jsonColumn: 'NEW.data' }, - ) - // 2. For DELETE triggers: column refs use OLD.data - const compiledDelete = compileSQLite( - { where: combinedWhere }, - { jsonColumn: 'OLD.data' }, + // On-demand mode. + // Registers a diff trigger for the active WHERE expressions. + function runOnDemandSync() { + start().catch((error) => + database.logger.error( + `Could not start syncing process for ${viewName} into ${trackedTableName}`, + error, + ), ) - // 3. For view-level SELECT: columns are direct (no jsonColumn) - const compiledView = compileSQLite({ where: combinedWhere }) - - const toSanitized = (compiled: { - where?: string - params: Array - }): string => { - if (!compiled.where) return 'TRUE' - const sqlParts = compiled.where.split('?') - return sanitizeSQL( - sqlParts as unknown as TemplateStringsArray, - ...compiled.params, - ) - } - const insertWhenClause = toSanitized(compiledInsert) - const deleteWhenClause = toSanitized(compiledDelete) - const viewWhereClause = toSanitized(compiledView) + // Tracks all active WHERE expressions for on-demand sync filtering. + // Each loadSubset call pushes its predicate; unloadSubset removes it. 
+ const activeWhereExpressions: Array = [] + const mutex = new Mutex() - await currentDisposeTracking?.() + const loadSubset = async ( + options?: LoadSubsetOptions, + ): Promise => { + if (options) { + activeWhereExpressions.push(options.where) + } - try { - currentDisposeTracking = await database.triggers.createDiffTrigger({ - source: viewName, - destination: trackedTableName, - when: { - [DiffTriggerOperation.INSERT]: insertWhenClause, - [DiffTriggerOperation.UPDATE]: `(${insertWhenClause}) OR (${deleteWhenClause})`, - [DiffTriggerOperation.DELETE]: deleteWhenClause, - }, - hooks: { - beforeCreate: async (context) => { - let currentBatchCount = syncBatchSize - let cursor = 0 - while (currentBatchCount == syncBatchSize) { - begin() + if (activeWhereExpressions.length === 0) { + await disposeTracking?.() + return + } - const batchItems = await context.getAll( - `SELECT * FROM ${viewName} WHERE ${viewWhereClause} LIMIT ? OFFSET ?`, - [syncBatchSize, cursor], - ) - currentBatchCount = batchItems.length - cursor += currentBatchCount - for (const row of batchItems) { - const writeType: OperationType = collection.has(row.id) - ? `update` - : `insert` - write({ - type: writeType, - value: deserializeSyncRow(row), - }) - } - commit() - } + const combinedWhere = + activeWhereExpressions.length === 1 + ? 
activeWhereExpressions[0] + : or( + activeWhereExpressions[0]!, + activeWhereExpressions[1]!, + ...activeWhereExpressions.slice(2), + ) + + const compiledNewData = compileSQLite( + { where: combinedWhere }, + { jsonColumn: 'NEW.data' }, + ) - // Only mark ready if not aborted to prevent invalid state transitions - if (!abortController.signal.aborted) { - markReady() - database.logger.info( - `Sync is ready for ${viewName} into ${trackedTableName}`, - ) - } - }, - }, - }) - } catch (error: any) { - // Ignore errors if cleanup was already initiated (closed remote) - if ( - !abortController.signal.aborted || - !error?.message?.includes('closed remote') - ) { - throw error - } - database.logger.debug( - 'Ignoring error during trigger creation after cleanup:', - error.message, + const compiledOldData = compileSQLite( + { where: combinedWhere }, + { jsonColumn: 'OLD.data' }, ) - } - } - const loadSubsetImpl = (options: LoadSubsetOptions): Promise => { - return mutex.runExclusive(() => loadSubsetImplInner(options)) - } + const compiledView = compileSQLite({ where: combinedWhere }) + + const toInlinedWhereClause = (compiled: { + where?: string + params: Array + }): string => { + if (!compiled.where) return 'TRUE' + const sqlParts = compiled.where.split('?') + return sanitizeSQL( + sqlParts as unknown as TemplateStringsArray, + ...compiled.params, + ) + } - const unloadSubsetImplInner = async (options: LoadSubsetOptions) => { - const idx = activeWhereExpressions.indexOf(options.where) - if (idx !== -1) { - activeWhereExpressions.splice(idx, 1) + const newDataWhenClause = toInlinedWhereClause(compiledNewData) + const oldDataWhenClause = toInlinedWhereClause(compiledOldData) + const viewWhereClause = toInlinedWhereClause(compiledView) + + await disposeTracking?.() + + disposeTracking = await createDiffTrigger({ + when: { + [DiffTriggerOperation.INSERT]: newDataWhenClause, + [DiffTriggerOperation.UPDATE]: `(${newDataWhenClause}) OR (${oldDataWhenClause})`, + 
[DiffTriggerOperation.DELETE]: oldDataWhenClause, + }, + writeType: (rowId: string) => + collection.has(rowId) ? `update` : `insert`, + batchQuery: ( + lockContext: LockContext, + batchSize: number, + cursor: number, + ) => + lockContext.getAll( + `SELECT * FROM ${viewName} WHERE ${viewWhereClause} LIMIT ? OFFSET ?`, + [batchSize, cursor], + ), + onReady: () => {}, + }) } - try { - await currentDisposeTracking?.() - await loadSubsetImplInner() - } catch (error: any) { - // Ignore errors if cleanup was already initiated (closed remote) - if ( - !abortController.signal.aborted || - !error?.message?.includes('closed remote') - ) { - throw error + const unloadSubset = async (options: LoadSubsetOptions) => { + const idx = activeWhereExpressions.indexOf(options.where) + if (idx !== -1) { + activeWhereExpressions.splice(idx, 1) } - database.logger.debug( - 'Ignoring error during unload after cleanup:', - error.message, - ) - } - } - const unloadSubsetImpl = (options: LoadSubsetOptions): Promise => { - return mutex.runExclusive(() => unloadSubsetImplInner(options)) - } + // Recreate the diff trigger for the remaining active WHERE expressions. + await loadSubset() + } - start().catch((error) => - database.logger.error( - `Could not start syncing process for ${viewName} into ${trackedTableName}`, - error, - ), - ) + markReady() - return { - cleanup: () => { - database.logger.info( - `Sync has been stopped for ${viewName} into ${trackedTableName}`, - ) - abortController.abort() - }, - loadSubset: syncMode === `on-demand` ? loadSubsetImpl : undefined, - unloadSubset: syncMode === `on-demand` ? 
unloadSubsetImpl : undefined, + return { + cleanup: () => { + database.logger.info( + `Sync has been stopped for ${viewName} into ${trackedTableName}`, + ) + abortController.abort() + }, + loadSubset: (options: LoadSubsetOptions) => + mutex.runExclusive(() => loadSubset(options)), + unloadSubset: (options: LoadSubsetOptions) => () => + unloadSubset(options), + } } }, // Expose the getSyncMetadata function From 3ff88e471a61c3cf32fe95ef3d222eac59b71325 Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 17 Feb 2026 16:29:28 +0200 Subject: [PATCH 13/15] Cleanup. --- .../powersync-db-collection/src/powersync.ts | 38 +++++++++---------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 6bb50a5b4..3b2d79227 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -304,7 +304,7 @@ export function powerSyncCollectionOptions< const abortController = new AbortController() let disposeTracking: (() => Promise) | null = null - + if (syncMode === `eager`) { return runEagerSync() } else { @@ -483,24 +483,22 @@ export function powerSyncCollectionOptions< error, ), ) - + // Tracks all active WHERE expressions for on-demand sync filtering. // Each loadSubset call pushes its predicate; unloadSubset removes it. const activeWhereExpressions: Array = [] const mutex = new Mutex() - - const loadSubset = async ( - options?: LoadSubsetOptions, - ): Promise => { + + const loadSubset = async (options?: LoadSubsetOptions): Promise => { if (options) { activeWhereExpressions.push(options.where) } - + if (activeWhereExpressions.length === 0) { await disposeTracking?.() return } - + const combinedWhere = activeWhereExpressions.length === 1 ? 
activeWhereExpressions[0] @@ -509,19 +507,19 @@ export function powerSyncCollectionOptions< activeWhereExpressions[1]!, ...activeWhereExpressions.slice(2), ) - + const compiledNewData = compileSQLite( { where: combinedWhere }, { jsonColumn: 'NEW.data' }, ) - + const compiledOldData = compileSQLite( { where: combinedWhere }, { jsonColumn: 'OLD.data' }, ) const compiledView = compileSQLite({ where: combinedWhere }) - + const toInlinedWhereClause = (compiled: { where?: string params: Array @@ -533,13 +531,13 @@ export function powerSyncCollectionOptions< ...compiled.params, ) } - + const newDataWhenClause = toInlinedWhereClause(compiledNewData) const oldDataWhenClause = toInlinedWhereClause(compiledOldData) const viewWhereClause = toInlinedWhereClause(compiledView) - + await disposeTracking?.() - + disposeTracking = await createDiffTrigger({ when: { [DiffTriggerOperation.INSERT]: newDataWhenClause, @@ -560,19 +558,19 @@ export function powerSyncCollectionOptions< onReady: () => {}, }) } - + const unloadSubset = async (options: LoadSubsetOptions) => { const idx = activeWhereExpressions.indexOf(options.where) if (idx !== -1) { activeWhereExpressions.splice(idx, 1) } - + // Recreate the diff trigger for the remaining active WHERE expressions. await loadSubset() } - + markReady() - + return { cleanup: () => { database.logger.info( @@ -582,8 +580,8 @@ export function powerSyncCollectionOptions< }, loadSubset: (options: LoadSubsetOptions) => mutex.runExclusive(() => loadSubset(options)), - unloadSubset: (options: LoadSubsetOptions) => () => - unloadSubset(options), + unloadSubset: (options: LoadSubsetOptions) => + mutex.runExclusive(() => unloadSubset(options)), } } }, From e6e876f033a36c684281b395c92f7055f680c74e Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Tue, 17 Feb 2026 17:01:22 +0200 Subject: [PATCH 14/15] Evict stale data on unload. 
--- .../powersync-db-collection/src/powersync.ts | 64 ++++++++++++++----- .../tests/on-demand-sync.test.ts | 32 +++++++--- 2 files changed, 70 insertions(+), 26 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 3b2d79227..a35b0d777 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -488,7 +488,7 @@ export function powerSyncCollectionOptions< // Each loadSubset call pushes its predicate; unloadSubset removes it. const activeWhereExpressions: Array = [] const mutex = new Mutex() - + const loadSubset = async (options?: LoadSubsetOptions): Promise => { if (options) { activeWhereExpressions.push(options.where) @@ -512,26 +512,14 @@ export function powerSyncCollectionOptions< { where: combinedWhere }, { jsonColumn: 'NEW.data' }, ) - + const compiledOldData = compileSQLite( { where: combinedWhere }, { jsonColumn: 'OLD.data' }, ) const compiledView = compileSQLite({ where: combinedWhere }) - - const toInlinedWhereClause = (compiled: { - where?: string - params: Array - }): string => { - if (!compiled.where) return 'TRUE' - const sqlParts = compiled.where.split('?') - return sanitizeSQL( - sqlParts as unknown as TemplateStringsArray, - ...compiled.params, - ) - } - + const newDataWhenClause = toInlinedWhereClause(compiledNewData) const oldDataWhenClause = toInlinedWhereClause(compiledOldData) const viewWhereClause = toInlinedWhereClause(compiledView) @@ -558,13 +546,57 @@ export function powerSyncCollectionOptions< onReady: () => {}, }) } + + const toInlinedWhereClause = (compiled: { + where?: string + params: Array + }): string => { + if (!compiled.where) return 'TRUE' + const sqlParts = compiled.where.split('?') + return sanitizeSQL( + sqlParts as unknown as TemplateStringsArray, + ...compiled.params, + ) + } const unloadSubset = async (options: LoadSubsetOptions) => { const idx = 
activeWhereExpressions.indexOf(options.where) if (idx !== -1) { activeWhereExpressions.splice(idx, 1) } - + + // Evict rows that were exclusively loaded by the departing predicate. + // These are rows matching the departing WHERE that are no longer covered + // by any remaining active predicate. + const compiledDeparting = compileSQLite({ where: options.where }) + const departingWhereSQL = toInlinedWhereClause(compiledDeparting) + + let evictionSQL: string + if (activeWhereExpressions.length === 0) { + evictionSQL = `SELECT id FROM ${viewName} WHERE ${departingWhereSQL}` + } else { + const combinedRemaining = + activeWhereExpressions.length === 1 + ? activeWhereExpressions[0]! + : or( + activeWhereExpressions[0]!, + activeWhereExpressions[1]!, + ...activeWhereExpressions.slice(2), + ) + const compiledRemaining = compileSQLite({ where: combinedRemaining }) + const remainingWhereSQL = toInlinedWhereClause(compiledRemaining) + evictionSQL = `SELECT id FROM ${viewName} WHERE (${departingWhereSQL}) AND NOT (${remainingWhereSQL})` + } + + const rowsToEvict = await database.getAll<{ id: string }>(evictionSQL) + if (rowsToEvict.length > 0) { + begin() + for (const { id } of rowsToEvict) { + write({ type: `delete`, key: id }) + } + commit() + } + // Recreate the diff trigger for the remaining active WHERE expressions. 
await loadSubset() } diff --git a/packages/powersync-db-collection/tests/on-demand-sync.test.ts b/packages/powersync-db-collection/tests/on-demand-sync.test.ts index 29b0dbc42..a23210471 100644 --- a/packages/powersync-db-collection/tests/on-demand-sync.test.ts +++ b/packages/powersync-db-collection/tests/on-demand-sync.test.ts @@ -547,11 +547,19 @@ describe(`On-Demand Sync Mode`, () => { { timeout: 2000 }, ) - const collectionSizeBeforeCleanup = collection.size + const electronicsCount = electronicsQuery.size // 3 - // Kill LQ2 — its predicate should be removed via unloadSubset + // Kill LQ2 — its predicate should be removed and its rows evicted clothingQuery.cleanup() + // Wait for clothing rows to be evicted; collection shrinks to electronics-only + await vi.waitFor( + () => { + expect(collection.size).toBe(electronicsCount) + }, + { timeout: 2000 }, + ) + // Insert a new clothing item — should NOT be picked up since LQ2 is gone await db.execute(` INSERT INTO products (id, name, price, category) @@ -561,9 +569,8 @@ describe(`On-Demand Sync Mode`, () => { // Wait to allow any (incorrect) propagation await new Promise((resolve) => setTimeout(resolve, 200)) - // Base collection should not have grown from the clothing insert - // Only electronics predicate is active, and 'New Shirt' is clothing - expect(collection.size).toBe(collectionSizeBeforeCleanup) + // Collection should not have grown — clothing predicate is no longer active + expect(collection.size).toBe(electronicsCount) // Insert a new electronics item — should still be picked up by LQ1 await db.execute(` @@ -578,11 +585,16 @@ describe(`On-Demand Sync Mode`, () => { { timeout: 2000 }, ) - const collectionSizeAfterGadget = collection.size - - // Kill LQ1 — no active predicates remain + // Kill LQ1 — no active predicates remain; electronics rows should be evicted electronicsQuery.cleanup() + await vi.waitFor( + () => { + expect(collection.size).toBe(0) + }, + { timeout: 2000 }, + ) + // Insert items matching 
both former predicates — neither should be picked up await db.execute(` INSERT INTO products (id, name, price, category) @@ -595,8 +607,8 @@ describe(`On-Demand Sync Mode`, () => { await new Promise((resolve) => setTimeout(resolve, 200)) - // Base collection should not have grown — no active predicates - expect(collection.size).toBe(collectionSizeAfterGadget) + // Collection should remain empty — no active predicates + expect(collection.size).toBe(0) }) describe(`Basic loadSubset behavior`, () => { From ca4006471eb83de86233073b7740a80caa260a4b Mon Sep 17 00:00:00 2001 From: Christiaan Landman Date: Wed, 18 Feb 2026 14:07:55 +0200 Subject: [PATCH 15/15] Added sqlite compiler support for length, concat, add. --- .../src/sqlite-compiler.ts | 23 +++++++++- .../tests/sqlite-compiler.test.ts | 42 +++++++++++++++++++ 2 files changed, 64 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/src/sqlite-compiler.ts b/packages/powersync-db-collection/src/sqlite-compiler.ts index 3aaab2886..e2df875fc 100644 --- a/packages/powersync-db-collection/src/sqlite-compiler.ts +++ b/packages/powersync-db-collection/src/sqlite-compiler.ts @@ -295,6 +295,27 @@ function compileFunction( return `LOWER(${compiledArgs[0]})` } + case `length`: { + if (compiledArgs.length !== 1) { + throw new Error(`length expects 1 argument`) + } + return `LENGTH(${compiledArgs[0]})` + } + + case `concat`: { + if (compiledArgs.length < 1) { + throw new Error(`concat expects at least 1 argument`) + } + return `CONCAT(${compiledArgs.join(`, `)})` + } + + case `add`: { + if (compiledArgs.length !== 2) { + throw new Error(`add expects 2 arguments`) + } + return `${compiledArgs[0]} + ${compiledArgs[1]}` + } + // Null fallback case `coalesce`: { if (compiledArgs.length < 1) { @@ -306,7 +327,7 @@ function compileFunction( default: throw new Error( `Operator '${name}' is not supported in PowerSync on-demand sync. 
` + - `Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like, ilike, upper, lower, coalesce`, + `Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like, ilike, upper, lower, length, concat, add, coalesce`, ) } } diff --git a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts index 5aa5ee36e..59c7d5d81 100644 --- a/packages/powersync-db-collection/tests/sqlite-compiler.test.ts +++ b/packages/powersync-db-collection/tests/sqlite-compiler.test.ts @@ -176,6 +176,48 @@ describe(`SQLite Compiler`, () => { expect(result.params).toEqual([`default`, `test`]) }) + it(`should compile length function`, () => { + const result = compileSQLite({ + where: func(`gt`, [func(`length`, [ref([`name`])]), val(5)]), + }) + + expect(result.where).toBe(`LENGTH("name") > ?`) + expect(result.params).toEqual([5]) + }) + + it(`should compile concat function with multiple args`, () => { + const result = compileSQLite({ + where: func(`eq`, [ + func(`concat`, [ref([`first_name`]), val(` `), ref([`last_name`])]), + val(`John Doe`), + ]), + }) + + expect(result.where).toBe(`CONCAT("first_name", ?, "last_name") = ?`) + expect(result.params).toEqual([` `, `John Doe`]) + }) + + it(`should compile add operator`, () => { + const result = compileSQLite({ + where: func(`gt`, [func(`add`, [ref([`price`]), val(10)]), val(100)]), + }) + + expect(result.where).toBe(`"price" + ? > ?`) + expect(result.params).toEqual([10, 100]) + }) + + it(`should throw for length with wrong arg count`, () => { + expect(() => + compileSQLite({ where: func(`length`, [ref([`a`]), ref([`b`])]) }), + ).toThrow(`length expects 1 argument`) + }) + + it(`should throw for add with wrong arg count`, () => { + expect(() => + compileSQLite({ where: func(`add`, [ref([`price`])]) }), + ).toThrow(`add expects 2 arguments`) + }) + it(`should throw error for unsupported operators`, () => { expect(() => compileSQLite({