From 8b2a49035a0d67bce1fbb9a0dc5e2f7eae729ae5 Mon Sep 17 00:00:00 2001 From: Claude Date: Fri, 5 Dec 2025 21:10:20 +0000 Subject: [PATCH 1/5] feat(durable-stream-db-collection): add new package for Durable Streams sync Implement @tanstack/durable-stream-db-collection, a TanStack DB collection that syncs data from Durable Streams servers in JSON mode. Key features: - Offset-resumable sync with automatic persistence - Batch-level offset handling with client-side deduplication - Configurable live modes (long-poll, SSE) - Custom storage adapters for offset persistence - Full TypeScript support with Standard Schema integration The collection attaches batch offsets to each row and handles the semantic difference between batch-level and row-level offsets transparently through the getDeduplicationKey callback. --- .../durable-stream-db-collection/README.md | 240 +++++++ .../durable-stream-db-collection/package.json | 65 ++ .../src/collection.ts | 194 ++++++ .../src/durable-streams-client.d.ts | 90 +++ .../durable-stream-db-collection/src/index.ts | 7 + .../src/offset-storage.ts | 123 ++++ .../durable-stream-db-collection/src/types.ts | 173 +++++ .../tests/collection.test-d.ts | 204 ++++++ .../tests/collection.test.ts | 603 ++++++++++++++++++ .../tsconfig.json | 20 + .../vite.config.ts | 24 + 11 files changed, 1743 insertions(+) create mode 100644 packages/durable-stream-db-collection/README.md create mode 100644 packages/durable-stream-db-collection/package.json create mode 100644 packages/durable-stream-db-collection/src/collection.ts create mode 100644 packages/durable-stream-db-collection/src/durable-streams-client.d.ts create mode 100644 packages/durable-stream-db-collection/src/index.ts create mode 100644 packages/durable-stream-db-collection/src/offset-storage.ts create mode 100644 packages/durable-stream-db-collection/src/types.ts create mode 100644 packages/durable-stream-db-collection/tests/collection.test-d.ts create mode 100644 
packages/durable-stream-db-collection/tests/collection.test.ts create mode 100644 packages/durable-stream-db-collection/tsconfig.json create mode 100644 packages/durable-stream-db-collection/vite.config.ts diff --git a/packages/durable-stream-db-collection/README.md b/packages/durable-stream-db-collection/README.md new file mode 100644 index 000000000..f87c95d8e --- /dev/null +++ b/packages/durable-stream-db-collection/README.md @@ -0,0 +1,240 @@ +# @tanstack/durable-stream-db-collection + +TanStack DB collection for [Durable Streams](https://github.com/durable-streams/durable-streams). + +## Installation + +```bash +npm install @tanstack/durable-stream-db-collection @tanstack/db @durable-streams/client +``` + +> **Note:** `@durable-streams/client` is a peer dependency. Install a compatible Durable Streams client that implements the [Durable Streams protocol](https://github.com/durable-streams/durable-streams). + +## Quick Start + +```typescript +import { createCollection } from '@tanstack/db' +import { durableStreamCollectionOptions } from '@tanstack/durable-stream-db-collection' + +const eventsCollection = createCollection( + durableStreamCollectionOptions({ + url: 'https://api.example.com/v1/stream/events', + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + }) +) +``` + +## Key Concepts + +### Batch-Level Offsets + +Durable Streams uses batch-level offsets, not row-level. When resuming from an offset, the entire batch may replay. This package handles deduplication automatically using your `getDeduplicationKey` function. + +### JSON Mode Requirement + +This package requires Durable Streams servers running in **JSON mode** (`content-type: application/json`). In JSON mode: + +- Each append is a valid JSON value +- Reads return parsed JSON arrays +- Message boundaries are preserved + +### Read-Only + +This collection is read-only. To write data, use your stream's append endpoint directly or through a wrapper protocol. 
+ +### Offset Persistence + +Offsets are automatically persisted to localStorage (configurable) for cross-session resumption. + +## API Reference + +### `durableStreamCollectionOptions` + +Creates TanStack DB collection configuration for a Durable Stream. + +```typescript +interface DurableStreamCollectionConfig { + // Required + url: string // URL of the Durable Stream endpoint + getKey: (row: TRow) => string | number // Extract primary key from row + getDeduplicationKey: (row: TRow) => string // Extract deduplication key from row + + // Optional + id?: string // Collection ID (auto-generated from URL if not provided) + schema?: StandardSchemaV1 // Standard Schema for validation + initialOffset?: string // Initial offset (default: '-1' for beginning) + headers?: Record // HTTP headers for requests + reconnectDelay?: number // Delay before reconnecting after error (default: 5000ms) + liveMode?: 'long-poll' | 'sse' // Live mode (default: 'long-poll') + storageKey?: string | false // Storage key prefix (default: 'durable-stream') + storage?: OffsetStorage // Custom storage adapter +} +``` + +### Output Type + +Each row from the collection includes the batch offset: + +```typescript +type RowWithOffset = TRow & { offset: string } +``` + +## Usage Examples + +### Basic Usage + +```typescript +import { createCollection } from '@tanstack/db' +import { durableStreamCollectionOptions } from '@tanstack/durable-stream-db-collection' + +interface Event { + id: string + type: string + payload: unknown + timestamp: string +} + +const eventsCollection = createCollection( + durableStreamCollectionOptions({ + url: 'https://api.example.com/v1/stream/events', + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + }) +) + +// Preload the collection +await eventsCollection.preload() + +// Access data +const events = eventsCollection.toArray +console.log(`Loaded ${events.length} events`) +``` + +### With Schema Validation + +```typescript +import { z } from 'zod' +import 
{ createCollection } from '@tanstack/db' +import { durableStreamCollectionOptions } from '@tanstack/durable-stream-db-collection' + +const eventSchema = z.object({ + id: z.string(), + type: z.string(), + payload: z.unknown(), + timestamp: z.string(), + seq: z.number(), +}) + +type Event = z.infer + +const eventsCollection = createCollection( + durableStreamCollectionOptions({ + url: 'https://api.example.com/v1/stream/events', + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + schema: eventSchema, + }) +) +``` + +### With Authentication + +```typescript +const eventsCollection = createCollection( + durableStreamCollectionOptions({ + url: 'https://api.example.com/v1/stream/events', + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + headers: { + 'Authorization': `Bearer ${token}`, + }, + }) +) +``` + +### Custom Storage Adapter + +```typescript +// For React Native with AsyncStorage +import AsyncStorage from '@react-native-async-storage/async-storage' + +const eventsCollection = createCollection( + durableStreamCollectionOptions({ + url: 'https://api.example.com/v1/stream/events', + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + storage: AsyncStorage, + }) +) +``` + +### Disable Offset Persistence + +```typescript +const eventsCollection = createCollection( + durableStreamCollectionOptions({ + url: 'https://api.example.com/v1/stream/events', + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + storageKey: false, // No persistence + }) +) +``` + +### With React + +```typescript +import { useLiveQuery } from '@tanstack/react-db' +import { eq } from '@tanstack/db' + +function EventList() { + const { data: events } = useLiveQuery((q) => + q.from({ event: eventsCollection }) + .where(({ event }) => eq(event.type, 'user.created')) + .orderBy(({ event }) => event.timestamp, 'desc') + ) + + return ( +
    <ul>
      {events.map(event => (
        <li key={event.id}>
          {event.type}: {JSON.stringify(event.payload)}
        </li>
      ))}
    </ul>
+ ) +} +``` + +## Deduplication Strategy + +When resuming from a batch offset, Durable Streams may replay the entire batch. The `getDeduplicationKey` function is critical for filtering out already-seen rows. + +**Common patterns:** + +```typescript +// Rows with unique IDs +getDeduplicationKey: (row) => row.id + +// Rows with sequence numbers per entity +getDeduplicationKey: (row) => `${row.entityId}:${row.seq}` + +// Composite keys +getDeduplicationKey: (row) => `${row.timestamp}:${row.id}` +``` + +The deduplication key must be: +- **Unique** within the stream +- **Deterministic** - the same row always produces the same key + +## Reconnection Behavior + +On error, the collection will: +1. Mark as ready (if not already) to avoid blocking UI +2. Wait for `reconnectDelay` milliseconds (default: 5000) +3. Reconnect and resume from the last successful offset + +## License + +MIT diff --git a/packages/durable-stream-db-collection/package.json b/packages/durable-stream-db-collection/package.json new file mode 100644 index 000000000..a41abb238 --- /dev/null +++ b/packages/durable-stream-db-collection/package.json @@ -0,0 +1,65 @@ +{ + "name": "@tanstack/durable-stream-db-collection", + "description": "Durable Streams collection for TanStack DB", + "version": "0.0.1", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@tanstack/db": "workspace:*" + }, + "peerDependencies": { + "@durable-streams/client": ">=0.1.0" + }, + "peerDependenciesMeta": { + "@durable-streams/client": { + "optional": true + } + }, + "devDependencies": { + "@vitest/coverage-istanbul": "^3.2.4", + "zod": "^3.23.0" + }, + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist", + "src" + ], + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "packageManager": 
"pnpm@10.24.0", + "author": "TanStack", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/TanStack/db.git", + "directory": "packages/durable-stream-db-collection" + }, + "homepage": "https://tanstack.com/db", + "keywords": [ + "durable-streams", + "streaming", + "sync", + "optimistic", + "typescript" + ], + "scripts": { + "build": "vite build", + "dev": "vite build --watch", + "lint": "eslint . --fix", + "test": "npx vitest run" + }, + "sideEffects": false, + "type": "module", + "types": "dist/esm/index.d.ts" +} diff --git a/packages/durable-stream-db-collection/src/collection.ts b/packages/durable-stream-db-collection/src/collection.ts new file mode 100644 index 000000000..e5b0ecd4c --- /dev/null +++ b/packages/durable-stream-db-collection/src/collection.ts @@ -0,0 +1,194 @@ +import { DurableStream } from '@durable-streams/client' +import { loadOffset, saveOffset } from './offset-storage' +import type { CollectionConfig, SyncConfig } from '@tanstack/db' +import type { StandardSchemaV1 } from '@standard-schema/spec' +import type { + DurableStreamCollectionConfig, + DurableStreamResult, + RowWithOffset, +} from './types' + +/** + * Helper type to extract the output type from a standard schema + */ +type InferSchemaOutput = T extends StandardSchemaV1 + ? StandardSchemaV1.InferOutput extends object + ? StandardSchemaV1.InferOutput + : Record + : Record + +/** + * Creates Durable Stream collection options for use with a standard Collection. + * + * This is a read-only collection that syncs data from a Durable Streams server + * in JSON mode. Each row is annotated with the batch offset for tracking. 
+ * + * @template TRow - The type of items in the collection + * @param config - Configuration options for the Durable Stream collection + * @returns Collection configuration compatible with TanStack DB createCollection() + * + * @example + * ```typescript + * import { createCollection } from '@tanstack/db' + * import { durableStreamCollectionOptions } from '@tanstack/durable-stream-db-collection' + * + * const eventsCollection = createCollection( + * durableStreamCollectionOptions({ + * url: 'https://api.example.com/v1/stream/events', + * getKey: (row) => row.id, + * getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + * }) + * ) + * ``` + */ + +// Overload for when schema is provided +export function durableStreamCollectionOptions< + T extends StandardSchemaV1, + TRow extends object = InferSchemaOutput, +>( + config: DurableStreamCollectionConfig & { + schema: T + }, +): Omit, string | number, T>, `utils`> & { + id: string + schema: T +} + +// Overload for when no schema is provided +export function durableStreamCollectionOptions( + config: DurableStreamCollectionConfig & { + schema?: never + }, +): Omit, string | number>, `utils`> & { + id: string + schema?: never +} + +// Implementation +export function durableStreamCollectionOptions( + config: DurableStreamCollectionConfig, +): Omit< + CollectionConfig, string | number, any>, + `utils` +> & { + id: string + schema?: any +} { + const collectionId = config.id ?? `durable-stream:${config.url}` + + const sync: SyncConfig>[`sync`] = (params) => { + const { begin, write, commit, markReady } = params + + let aborted = false + + // Track seen deduplication keys to filter replayed rows + const seenKeys = new Set() + + const syncLoop = async () => { + let isFirstBatch = true + + // Load persisted offset or use initial offset + const persistedOffset = await loadOffset(config) + let currentOffset = persistedOffset ?? config.initialOffset ?? 
`-1` + + // Create the Durable Stream client + const stream = new DurableStream({ + url: config.url, + headers: config.headers, + }) + + try { + const followOptions = { + offset: currentOffset, + live: config.liveMode ?? `long-poll`, + } + + for await (const result of stream.follow( + followOptions, + ) as AsyncIterable>) { + if (aborted) break + + // In JSON mode, result.data is the parsed array + const rows = result.data + + // Only start a transaction if we have rows to process + if (rows.length > 0) { + begin() + + for (const row of rows) { + // Deduplicate - batch offsets may cause replay on resume + const dedupKey = config.getDeduplicationKey(row) + if (seenKeys.has(dedupKey)) { + continue + } + seenKeys.add(dedupKey) + + // Attach batch offset to row + const rowWithOffset: RowWithOffset = { + ...row, + offset: result.offset, + } + + write({ + type: `insert`, + value: rowWithOffset, + }) + } + + commit() + } + + // Update offset for next iteration / persistence + currentOffset = result.offset + await saveOffset(config, currentOffset) + + // Mark ready after first successful batch + if (isFirstBatch) { + markReady() + isFirstBatch = false + } + } + } catch (error) { + console.error(`Durable stream sync error:`, error) + + // Ensure markReady is called even on error so UI doesn't hang + if (isFirstBatch) { + markReady() + } + + // Reconnect after delay if not aborted + if (!aborted) { + const delay = config.reconnectDelay ?? 
5000 + setTimeout(syncLoop, delay) + } + } + } + + // Start sync loop + syncLoop() + + // Return cleanup function + return { + cleanup: () => { + aborted = true + }, + } + } + + // Create the getKey function that extracts from RowWithOffset + const getKey = (row: RowWithOffset): string | number => { + // Extract the original row (without offset) for the user's getKey function + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const { offset: _offset, ...originalRow } = row + return config.getKey(originalRow as TRow) + } + + return { + id: collectionId, + schema: config.schema, + getKey, + sync: { sync }, + // No mutation handlers - this is a read-only sync + } +} diff --git a/packages/durable-stream-db-collection/src/durable-streams-client.d.ts b/packages/durable-stream-db-collection/src/durable-streams-client.d.ts new file mode 100644 index 000000000..80538ec62 --- /dev/null +++ b/packages/durable-stream-db-collection/src/durable-streams-client.d.ts @@ -0,0 +1,90 @@ +/** + * Type declarations for @durable-streams/client + * + * This module provides client types for the Durable Streams protocol. + * See: https://github.com/durable-streams/durable-streams + */ + +declare module '@durable-streams/client' { + export interface DurableStreamOptions { + /** + * URL of the Durable Stream endpoint. + */ + url: string + + /** + * HTTP headers to include in requests. + */ + headers?: Record + } + + export interface FollowOptions { + /** + * The offset to start reading from. + * Use '-1' to read from the beginning. + */ + offset: string + + /** + * Live mode for following the stream. + * - 'long-poll': HTTP long-polling (default) + * - 'sse': Server-Sent Events + */ + live?: 'long-poll' | 'sse' + } + + export interface StreamResult { + /** + * The data from this batch. + * In JSON mode, this is an array of parsed JSON objects. + */ + data: TData + + /** + * The Stream-Next-Offset for this batch. + * Use this offset to resume from this point. 
+ */ + offset: string + } + + export interface ReadOptions { + /** + * The offset to start reading from. + */ + offset?: string + } + + export interface ReadResult extends StreamResult {} + + /** + * Durable Streams client for reading from a Durable Stream. + * + * @example + * ```typescript + * const stream = new DurableStream({ url: 'https://api.example.com/v1/stream/events' }) + * + * // Read from a specific offset + * const result = await stream.read({ offset: '-1' }) + * console.log(result.data, result.offset) + * + * // Follow the stream live + * for await (const result of stream.follow({ offset: '-1', live: 'long-poll' })) { + * console.log(result.data, result.offset) + * } + * ``` + */ + export class DurableStream { + constructor(options: DurableStreamOptions) + + /** + * Read data from the stream starting at the given offset. + */ + read(options?: ReadOptions): Promise> + + /** + * Follow the stream from a given offset, yielding results as they arrive. + * This is an async iterator that yields results continuously. + */ + follow(options: FollowOptions): AsyncIterable> + } +} diff --git a/packages/durable-stream-db-collection/src/index.ts b/packages/durable-stream-db-collection/src/index.ts new file mode 100644 index 000000000..b1f6d3bdd --- /dev/null +++ b/packages/durable-stream-db-collection/src/index.ts @@ -0,0 +1,7 @@ +export { durableStreamCollectionOptions } from './collection' +export type { + DurableStreamCollectionConfig, + RowWithOffset, + LiveMode, + OffsetStorage, +} from './types' diff --git a/packages/durable-stream-db-collection/src/offset-storage.ts b/packages/durable-stream-db-collection/src/offset-storage.ts new file mode 100644 index 000000000..d22eba0e9 --- /dev/null +++ b/packages/durable-stream-db-collection/src/offset-storage.ts @@ -0,0 +1,123 @@ +import type { DurableStreamCollectionConfig, OffsetStorage } from './types' + +/** + * Get the storage key for persisting offset. 
+ * @returns The storage key, or null if persistence is disabled. + */ +export function getStorageKey( + config: DurableStreamCollectionConfig, +): string | null { + if (config.storageKey === false) { + return null + } + const prefix = config.storageKey ?? `durable-stream` + return `${prefix}:${config.url}:offset` +} + +/** + * Get the default storage adapter. + * Returns localStorage if available, otherwise null. + */ +function getDefaultStorage(): OffsetStorage | null { + if (typeof localStorage !== `undefined`) { + return localStorage + } + return null +} + +/** + * Get the storage adapter to use. + * Returns the configured storage, or the default storage. + */ +function getStorage( + config: DurableStreamCollectionConfig, +): OffsetStorage | null { + return config.storage ?? getDefaultStorage() +} + +/** + * Load the persisted offset from storage. + * @returns The persisted offset, or null if not found or persistence is disabled. + */ +export async function loadOffset( + config: DurableStreamCollectionConfig, +): Promise { + const key = getStorageKey(config) + if (!key) { + return null + } + + const storage = getStorage(config) + if (!storage) { + return null + } + + try { + const result = storage.getItem(key) + // Handle both sync and async storage + if (result instanceof Promise) { + return (await result) ?? null + } + return result ?? null + } catch { + // Ignore storage errors (e.g., SecurityError in some browsers) + return null + } +} + +/** + * Save the offset to storage. + * Does nothing if persistence is disabled or storage is unavailable. 
+ */ +export async function saveOffset( + config: DurableStreamCollectionConfig, + offset: string, +): Promise { + const key = getStorageKey(config) + if (!key) { + return + } + + const storage = getStorage(config) + if (!storage) { + return + } + + try { + const result = storage.setItem(key, offset) + // Handle both sync and async storage + if (result instanceof Promise) { + await result + } + } catch { + // Ignore storage errors (e.g., QuotaExceededError, SecurityError) + } +} + +/** + * Clear the persisted offset from storage. + * Useful for resetting sync state. + */ +export async function clearOffset( + config: DurableStreamCollectionConfig, +): Promise { + const key = getStorageKey(config) + if (!key) { + return + } + + const storage = getStorage(config) + if (!storage) { + return + } + + try { + // Use setItem with empty string as a fallback since not all storage adapters have removeItem + const result = storage.setItem(key, ``) + if (result instanceof Promise) { + await result + } + } catch { + // Ignore storage errors + } +} diff --git a/packages/durable-stream-db-collection/src/types.ts b/packages/durable-stream-db-collection/src/types.ts new file mode 100644 index 000000000..a25f92330 --- /dev/null +++ b/packages/durable-stream-db-collection/src/types.ts @@ -0,0 +1,173 @@ +import type { StandardSchemaV1 } from '@standard-schema/spec' + +/** + * Storage adapter interface for offset persistence. + * Compatible with localStorage, sessionStorage, AsyncStorage, etc. + */ +export interface OffsetStorage { + getItem: (key: string) => string | null | Promise + setItem: (key: string, value: string) => void | Promise +} + +/** + * Live mode options for following a Durable Stream. + */ +export type LiveMode = 'long-poll' | 'sse' + +/** + * Configuration interface for Durable Stream collection options. 
+ * @template TRow - The type of items in the collection + */ +export interface DurableStreamCollectionConfig { + // ═══════════════════════════════════════════════════════════════════ + // Required + // ═══════════════════════════════════════════════════════════════════ + + /** + * URL of the Durable Stream endpoint. + * Must be a stream in JSON mode. + */ + url: string + + /** + * Extract a unique key from each row. + * Used as the collection's primary key for lookups and updates. + */ + getKey: (row: TRow) => string | number + + /** + * Extract a deduplication key from each row. + * Used to filter out replayed rows when resuming from a batch offset. + * + * This key must be unique within the stream and deterministic - + * the same row must always produce the same deduplication key. + * + * Common patterns: + * - `${row.id}` for rows with unique IDs + * - `${row.groupId}:${row.seq}` for rows with sequence numbers per group + */ + getDeduplicationKey: (row: TRow) => string + + // ═══════════════════════════════════════════════════════════════════ + // Optional + // ═══════════════════════════════════════════════════════════════════ + + /** + * Unique identifier for the collection. + * Auto-generated from URL if not provided. + */ + id?: string + + /** + * Schema for runtime validation and type inference. + * Must be a Standard Schema compatible schema (Zod, Valibot, etc.) + */ + schema?: StandardSchemaV1 + + /** + * Initial offset to start reading from. + * Use '-1' to read from the beginning. + * + * @default '-1' + */ + initialOffset?: string + + /** + * HTTP headers to include in stream requests. + * Useful for authentication tokens. + */ + headers?: Record + + /** + * Delay in milliseconds before reconnecting after an error. + * + * @default 5000 + */ + reconnectDelay?: number + + /** + * Live mode for following the stream. + * + * @default 'long-poll' + */ + liveMode?: LiveMode + + /** + * Storage key prefix for persisting offsets. 
+ * Set to false to disable offset persistence. + * + * @default 'durable-stream' + */ + storageKey?: string | false + + /** + * Custom storage adapter for offset persistence. + * Defaults to localStorage in browsers. + */ + storage?: OffsetStorage +} + +/** + * Output row type includes the batch offset. + * Each row from a Durable Stream batch is annotated with the batch's offset. + */ +export type RowWithOffset = TRow & { offset: string } + +/** + * Result from a Durable Stream follow iteration. + * In JSON mode, data is the parsed array of rows. + */ +export interface DurableStreamResult { + /** + * The data from this batch. In JSON mode, this is an array of parsed JSON objects. + */ + data: Array + + /** + * The Stream-Next-Offset for this batch. + * Use this offset to resume from this point. + */ + offset: string +} + +/** + * Options for the DurableStream.follow() method. + */ +export interface FollowOptions { + /** + * The offset to start reading from. + * Use '-1' to read from the beginning. + */ + offset: string + + /** + * Live mode for following the stream. + */ + live?: LiveMode +} + +/** + * Interface for the Durable Streams client. + * This matches the @durable-streams/client package API. + */ +export interface DurableStreamClient { + /** + * Follow the stream from a given offset, yielding results as they arrive. + */ + follow(options: FollowOptions): AsyncIterable> +} + +/** + * Constructor options for DurableStream client. + */ +export interface DurableStreamOptions { + /** + * URL of the Durable Stream endpoint. + */ + url: string + + /** + * HTTP headers to include in requests. 
+ */ + headers?: Record +} diff --git a/packages/durable-stream-db-collection/tests/collection.test-d.ts b/packages/durable-stream-db-collection/tests/collection.test-d.ts new file mode 100644 index 000000000..29360de13 --- /dev/null +++ b/packages/durable-stream-db-collection/tests/collection.test-d.ts @@ -0,0 +1,204 @@ +import { describe, expectTypeOf, it } from 'vitest' +import { createCollection } from '@tanstack/db' +import { durableStreamCollectionOptions } from '../src/collection' +import type { RowWithOffset } from '../src/types' +import { z } from 'zod' + +describe(`durableStreamCollectionOptions types`, () => { + it(`should infer row type from getKey function`, () => { + interface Event { + id: string + type: string + payload: unknown + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + }) + + // The collection should have RowWithOffset as the item type + const collection = createCollection(options) + + // Get should return RowWithOffset | undefined + const item = collection.get(`test`) + expectTypeOf(item).toEqualTypeOf | undefined>() + + if (item) { + expectTypeOf(item.id).toEqualTypeOf() + expectTypeOf(item.type).toEqualTypeOf() + expectTypeOf(item.payload).toEqualTypeOf() + expectTypeOf(item.offset).toEqualTypeOf() + } + }) + + it(`should infer row type from schema`, () => { + const eventSchema = z.object({ + id: z.string(), + type: z.string(), + timestamp: z.number(), + }) + + type Event = z.infer + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + schema: eventSchema, + }) + + const collection = createCollection(options) + const item = collection.get(`test`) + + if (item) { + expectTypeOf(item.id).toEqualTypeOf() + expectTypeOf(item.type).toEqualTypeOf() + expectTypeOf(item.timestamp).toEqualTypeOf() + 
expectTypeOf(item.offset).toEqualTypeOf() + } + }) + + it(`should allow string or number keys`, () => { + interface Event { + id: number + name: string + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, // number key + getDeduplicationKey: (row) => String(row.id), + }) + + const collection = createCollection(options) + + // Should accept number keys + const item = collection.get(123) + expectTypeOf(item).toEqualTypeOf | undefined>() + }) + + it(`should require getKey and getDeduplicationKey`, () => { + interface Event { + id: string + } + + // @ts-expect-error - missing required getKey + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getDeduplicationKey: (row) => row.id, + }) + + // @ts-expect-error - missing required getDeduplicationKey + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + }) + + // @ts-expect-error - missing required url + durableStreamCollectionOptions({ + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + }) + }) + + it(`should type headers correctly`, () => { + interface Event { + id: string + } + + // Should allow Record + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + headers: { + Authorization: `Bearer token`, + 'Content-Type': `application/json`, + }, + }) + }) + + it(`should type storage interface correctly`, () => { + interface Event { + id: string + } + + // Sync storage + const syncStorage = { + getItem: (key: string): string | null => null, + setItem: (key: string, value: string): void => {}, + } + + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + storage: syncStorage, + }) + + // Async storage + const asyncStorage = { + getItem: async (key: string): Promise => null, + setItem: async (key: string, value: 
string): Promise => {}, + } + + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + storage: asyncStorage, + }) + }) + + it(`should allow storageKey to be false or string`, () => { + interface Event { + id: string + } + + // String prefix + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + storageKey: `my-app`, + }) + + // Disabled + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + storageKey: false, + }) + }) + + it(`should type liveMode correctly`, () => { + interface Event { + id: string + } + + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + liveMode: `long-poll`, + }) + + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + liveMode: `sse`, + }) + + // @ts-expect-error - invalid live mode + durableStreamCollectionOptions({ + url: `http://example.com/stream`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => row.id, + liveMode: `invalid`, + }) + }) +}) diff --git a/packages/durable-stream-db-collection/tests/collection.test.ts b/packages/durable-stream-db-collection/tests/collection.test.ts new file mode 100644 index 000000000..7ccfcd4a9 --- /dev/null +++ b/packages/durable-stream-db-collection/tests/collection.test.ts @@ -0,0 +1,603 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' +import { createCollection } from '@tanstack/db' +import { durableStreamCollectionOptions } from '../src/collection' +import type { DurableStreamResult, RowWithOffset } from '../src/types' + +// Test row type +interface TestRow { + id: string + name: string + seq: number +} + +// Mock controller for the follow iterator 
+interface MockFollowController { + emit: (result: DurableStreamResult) => void + complete: () => void + error: (err: Error) => void +} + +// Mock the @durable-streams/client module +let mockFollowController: MockFollowController | null = null +const mockFollow = vi.fn() + +vi.mock(`@durable-streams/client`, () => { + return { + DurableStream: vi.fn().mockImplementation(() => ({ + follow: mockFollow, + })), + } +}) + +// Helper to create an async iterator from a controller +function createMockFollowIterator(): AsyncIterable> { + const queue: Array> = [] + let resolveNext: ((value: IteratorResult>) => void) | null = null + let isDone = false + let error: Error | null = null + + mockFollowController = { + emit: (result) => { + if (resolveNext) { + resolveNext({ value: result, done: false }) + resolveNext = null + } else { + queue.push(result) + } + }, + complete: () => { + isDone = true + if (resolveNext) { + resolveNext({ value: undefined as any, done: true }) + resolveNext = null + } + }, + error: (err) => { + error = err + if (resolveNext) { + // We need to reject the promise, but we can't do that from here + // So we'll throw on next iteration + } + }, + } + + return { + [Symbol.asyncIterator](): AsyncIterator> { + return { + async next() { + if (error) { + throw error + } + if (queue.length > 0) { + return { value: queue.shift()!, done: false } + } + if (isDone) { + return { value: undefined as any, done: true } + } + return new Promise((resolve) => { + resolveNext = resolve + }) + }, + } + }, + } +} + +describe(`durableStreamCollectionOptions`, () => { + beforeEach(() => { + vi.clearAllMocks() + mockFollowController = null + + // Setup mock follow to return our controlled iterator + mockFollow.mockImplementation(() => createMockFollowIterator()) + }) + + it(`should create a collection with correct id from url`, () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: 
(row) => `${row.id}:${row.seq}`, + }) + + expect(options.id).toBe(`durable-stream:http://example.com/stream/events`) + }) + + it(`should use custom id when provided`, () => { + const options = durableStreamCollectionOptions({ + id: `my-custom-id`, + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + }) + + expect(options.id).toBe(`my-custom-id`) + }) + + it(`should sync data from stream and mark ready after first batch`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, // Disable persistence for test + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + // Wait for sync to start + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Emit first batch + mockFollowController?.emit({ + data: [ + { id: `1`, name: `Test 1`, seq: 0 }, + { id: `2`, name: `Test 2`, seq: 0 }, + ], + offset: `offset-1`, + }) + + // Wait for processing + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Collection should be ready and have data + expect(collection.isReady()).toBe(true) + expect(collection.size).toBe(2) + expect(collection.get(`1`)).toEqual({ + id: `1`, + name: `Test 1`, + seq: 0, + offset: `offset-1`, + }) + expect(collection.get(`2`)).toEqual({ + id: `2`, + name: `Test 2`, + seq: 0, + offset: `offset-1`, + }) + + await collection.cleanup() + }) + + it(`should attach batch offset to each row`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Emit batch with specific offset 
+ mockFollowController?.emit({ + data: [{ id: `1`, name: `Test`, seq: 0 }], + offset: `batch-offset-123`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + const row = collection.get(`1`) as RowWithOffset + expect(row.offset).toBe(`batch-offset-123`) + + await collection.cleanup() + }) + + it(`should deduplicate replayed rows on resume`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // First batch with seq 0 and 1 + mockFollowController?.emit({ + data: [ + { id: `1`, name: `Test`, seq: 0 }, + { id: `1`, name: `Test`, seq: 1 }, + ], + offset: `offset-a`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + expect(collection.size).toBe(2) + + // Second batch with seq 1 replayed and seq 2 new + mockFollowController?.emit({ + data: [ + { id: `1`, name: `Test`, seq: 1 }, // Replayed - should be deduplicated + { id: `1`, name: `Test`, seq: 2 }, // New + ], + offset: `offset-b`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Should have 3 items, not 4 (seq 1 deduplicated) + expect(collection.size).toBe(3) + + await collection.cleanup() + }) + + it(`should handle empty batches without starting transaction`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Emit empty batch + mockFollowController?.emit({ + data: [], + offset: `offset-empty`, + }) + + await new Promise((resolve) => 
setTimeout(resolve, 10)) + + // Collection should be ready even with empty batch + expect(collection.isReady()).toBe(true) + expect(collection.size).toBe(0) + + await collection.cleanup() + }) + + it(`should handle multiple sequential batches`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // First batch + mockFollowController?.emit({ + data: [{ id: `1`, name: `Test 1`, seq: 0 }], + offset: `offset-1`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + expect(collection.size).toBe(1) + + // Second batch + mockFollowController?.emit({ + data: [{ id: `2`, name: `Test 2`, seq: 0 }], + offset: `offset-2`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + expect(collection.size).toBe(2) + + // Third batch + mockFollowController?.emit({ + data: [{ id: `3`, name: `Test 3`, seq: 0 }], + offset: `offset-3`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + expect(collection.size).toBe(3) + + await collection.cleanup() + }) + + it(`should use getKey function correctly with offset stripped`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + // getKey should work on the original row type + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + mockFollowController?.emit({ + data: [{ id: `test-id-123`, name: `Test`, seq: 0 }], + offset: `offset-1`, + }) + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Should be able to get by the original 
key + expect(collection.has(`test-id-123`)).toBe(true) + expect(collection.get(`test-id-123`)).toBeDefined() + + await collection.cleanup() + }) + + it(`should pass headers to DurableStream client`, () => { + durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + headers: { + Authorization: `Bearer test-token`, + 'X-Custom-Header': `custom-value`, + }, + storageKey: false, + }) + + // Check that DurableStream was instantiated with the headers + const { DurableStream } = require(`@durable-streams/client`) + + // Create collection to trigger the sync + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + headers: { + Authorization: `Bearer test-token`, + }, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + expect(DurableStream).toHaveBeenCalledWith({ + url: `http://example.com/stream/events`, + headers: { + Authorization: `Bearer test-token`, + }, + }) + + collection.cleanup() + }) +}) + +describe(`offset-storage`, () => { + it(`should load offset from storage on start`, async () => { + const mockStorage = { + getItem: vi.fn().mockReturnValue(`saved-offset-123`), + setItem: vi.fn(), + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storage: mockStorage, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + // Wait for async storage operations + await new Promise((resolve) => setTimeout(resolve, 50)) + + // Should have loaded from storage + expect(mockStorage.getItem).toHaveBeenCalledWith( + `durable-stream:http://example.com/stream/events:offset`, + ) + + // follow should have been called with the saved offset 
+ expect(mockFollow).toHaveBeenCalledWith({ + offset: `saved-offset-123`, + live: `long-poll`, + }) + + await collection.cleanup() + }) + + it(`should save offset to storage after each batch`, async () => { + const mockStorage = { + getItem: vi.fn().mockReturnValue(null), + setItem: vi.fn(), + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storage: mockStorage, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Emit batch + mockFollowController?.emit({ + data: [{ id: `1`, name: `Test`, seq: 0 }], + offset: `new-offset-456`, + }) + + await new Promise((resolve) => setTimeout(resolve, 50)) + + // Should have saved the new offset + expect(mockStorage.setItem).toHaveBeenCalledWith( + `durable-stream:http://example.com/stream/events:offset`, + `new-offset-456`, + ) + + await collection.cleanup() + }) + + it(`should use custom storage key prefix`, async () => { + const mockStorage = { + getItem: vi.fn().mockReturnValue(null), + setItem: vi.fn(), + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storage: mockStorage, + storageKey: `my-app`, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + // Should use custom prefix + expect(mockStorage.getItem).toHaveBeenCalledWith( + `my-app:http://example.com/stream/events:offset`, + ) + + await collection.cleanup() + }) + + it(`should not persist when storageKey is false`, async () => { + const mockStorage = { + getItem: vi.fn().mockReturnValue(null), + setItem: vi.fn(), + } + + const options = durableStreamCollectionOptions({ + url: 
`http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storage: mockStorage, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 10)) + + mockFollowController?.emit({ + data: [{ id: `1`, name: `Test`, seq: 0 }], + offset: `some-offset`, + }) + + await new Promise((resolve) => setTimeout(resolve, 50)) + + // Should not have called storage at all + expect(mockStorage.getItem).not.toHaveBeenCalled() + expect(mockStorage.setItem).not.toHaveBeenCalled() + + await collection.cleanup() + }) + + it(`should use initialOffset when no persisted offset exists`, async () => { + const mockStorage = { + getItem: vi.fn().mockReturnValue(null), + setItem: vi.fn(), + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storage: mockStorage, + initialOffset: `custom-initial-offset`, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 50)) + + // follow should have been called with the initial offset + expect(mockFollow).toHaveBeenCalledWith({ + offset: `custom-initial-offset`, + live: `long-poll`, + }) + + await collection.cleanup() + }) + + it(`should default to -1 offset when no persisted or initial offset`, async () => { + const mockStorage = { + getItem: vi.fn().mockReturnValue(null), + setItem: vi.fn(), + } + + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storage: mockStorage, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 50)) + + // follow should have been 
called with -1 (default) + expect(mockFollow).toHaveBeenCalledWith({ + offset: `-1`, + live: `long-poll`, + }) + + await collection.cleanup() + }) +}) + +describe(`live mode configuration`, () => { + it(`should use long-poll mode by default`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 50)) + + expect(mockFollow).toHaveBeenCalledWith( + expect.objectContaining({ + live: `long-poll`, + }), + ) + + await collection.cleanup() + }) + + it(`should use sse mode when configured`, async () => { + const options = durableStreamCollectionOptions({ + url: `http://example.com/stream/events`, + getKey: (row) => row.id, + getDeduplicationKey: (row) => `${row.id}:${row.seq}`, + liveMode: `sse`, + storageKey: false, + }) + + const collection = createCollection(options) + collection.startSyncImmediate() + + await new Promise((resolve) => setTimeout(resolve, 50)) + + expect(mockFollow).toHaveBeenCalledWith( + expect.objectContaining({ + live: `sse`, + }), + ) + + await collection.cleanup() + }) +}) diff --git a/packages/durable-stream-db-collection/tsconfig.json b/packages/durable-stream-db-collection/tsconfig.json new file mode 100644 index 000000000..ad23edbc0 --- /dev/null +++ b/packages/durable-stream-db-collection/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "moduleResolution": "Bundler", + "declaration": true, + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "paths": { + "@tanstack/db-ivm": ["../db-ivm/src"], + "@tanstack/db": ["../db/src"] + } + }, + "include": ["src", "tests", 
"vite.config.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/durable-stream-db-collection/vite.config.ts b/packages/durable-stream-db-collection/vite.config.ts new file mode 100644 index 000000000..6b9e3c255 --- /dev/null +++ b/packages/durable-stream-db-collection/vite.config.ts @@ -0,0 +1,24 @@ +import { defineConfig, mergeConfig } from 'vitest/config' +import { tanstackViteConfig } from '@tanstack/vite-config' +import packageJson from './package.json' + +const config = defineConfig({ + test: { + name: packageJson.name, + include: [`tests/**/*.test.ts`], + environment: `jsdom`, + coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, + typecheck: { + enabled: true, + include: [`tests/**/*.test.ts`, `tests/**/*.test-d.ts`], + }, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: `./src/index.ts`, + srcDir: `./src`, + }), +) From e273179fb9087f98fc7708480fdc8e4cf90698a4 Mon Sep 17 00:00:00 2001 From: James Arthur Date: Tue, 9 Dec 2025 17:00:43 -0800 Subject: [PATCH 2/5] wip: adding collect aggregation function. 
--- .tool-versions | 2 + packages/db-ivm/src/operators/groupBy.ts | 27 ++ packages/db/src/query/builder/functions.ts | 11 + packages/db/src/query/compiler/group-by.ts | 4 +- packages/db/src/query/index.ts | 1 + packages/db/tests/query/group-by.test.ts | 238 ++++++++++++++++++ .../durable-stream-db-collection/package.json | 2 +- 7 files changed, 283 insertions(+), 2 deletions(-) create mode 100644 .tool-versions diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 000000000..48ffe3307 --- /dev/null +++ b/.tool-versions @@ -0,0 +1,2 @@ +nodejs 22.13.1 +pnpm 10.13.1 \ No newline at end of file diff --git a/packages/db-ivm/src/operators/groupBy.ts b/packages/db-ivm/src/operators/groupBy.ts index b752ae560..1d01e8eeb 100644 --- a/packages/db-ivm/src/operators/groupBy.ts +++ b/packages/db-ivm/src/operators/groupBy.ts @@ -366,6 +366,32 @@ export function mode( } } +/** + * Creates a collect aggregate function that gathers all values into an array + * This is similar to SQL's array_agg or GROUP_CONCAT + * @param valueExtractor Function to extract a value from each data entry + */ +export function collect( + valueExtractor: (value: T) => V = (v) => v as unknown as V, +): AggregateFunction, Array> { + return { + preMap: (data: T) => [valueExtractor(data)], + reduce: (values: Array<[Array, number]>) => { + const allValues: Array = [] + for (const [valueArray, multiplicity] of values) { + for (const value of valueArray) { + // Add each value 'multiplicity' times for correct IVM semantics + for (let i = 0; i < multiplicity; i++) { + allValues.push(value) + } + } + } + return allValues + }, + // No postMap - return the array directly + } +} + export const groupByOperators = { sum, count, @@ -374,4 +400,5 @@ export const groupByOperators = { max, median, mode, + collect, } diff --git a/packages/db/src/query/builder/functions.ts b/packages/db/src/query/builder/functions.ts index 41ce11370..bc9cadc3a 100644 --- a/packages/db/src/query/builder/functions.ts +++ 
b/packages/db/src/query/builder/functions.ts @@ -324,6 +324,16 @@ export function max(arg: T): AggregateReturnType { return new Aggregate(`max`, [toExpression(arg)]) as AggregateReturnType } +/** + * Collects all values in a group into an array + * Similar to SQL's array_agg or GROUP_CONCAT + */ +export function collect( + arg: T, +): Aggregate>> { + return new Aggregate(`collect`, [toExpression(arg)]) +} + /** * List of comparison function names that can be used with indexes */ @@ -373,6 +383,7 @@ export const operators = [ `sum`, `min`, `max`, + `collect`, ] as const export type OperatorName = (typeof operators)[number] diff --git a/packages/db/src/query/compiler/group-by.ts b/packages/db/src/query/compiler/group-by.ts index e9c8fa436..f6f6ee253 100644 --- a/packages/db/src/query/compiler/group-by.ts +++ b/packages/db/src/query/compiler/group-by.ts @@ -16,7 +16,7 @@ import type { } from '../ir.js' import type { NamespacedAndKeyedStream, NamespacedRow } from '../../types.js' -const { sum, count, avg, min, max } = groupByOperators +const { sum, count, avg, min, max, collect } = groupByOperators /** * Interface for caching the mapping between GROUP BY expressions and SELECT expressions @@ -379,6 +379,8 @@ function getAggregateFunction(aggExpr: Aggregate) { return min(valueExtractorWithDate) case `max`: return max(valueExtractorWithDate) + case `collect`: + return collect(rawValueExtractor) default: throw new UnsupportedAggregateFunctionError(aggExpr.name) } diff --git a/packages/db/src/query/index.ts b/packages/db/src/query/index.ts index e798b283a..63905caf1 100644 --- a/packages/db/src/query/index.ts +++ b/packages/db/src/query/index.ts @@ -41,6 +41,7 @@ export { sum, min, max, + collect, } from './builder/functions.js' // Ref proxy utilities diff --git a/packages/db/tests/query/group-by.test.ts b/packages/db/tests/query/group-by.test.ts index e21f7be2c..d726245c1 100644 --- a/packages/db/tests/query/group-by.test.ts +++ b/packages/db/tests/query/group-by.test.ts 
@@ -5,6 +5,7 @@ import { mockSyncCollectionOptions } from '../utils.js' import { and, avg, + collect, count, eq, gt, @@ -1387,3 +1388,240 @@ describe(`Query GROUP BY Execution`, () => { createGroupByTests(`off`) createGroupByTests(`eager`) }) + +describe(`Collect Aggregate Function`, () => { + let ordersCollection: ReturnType + + function createOrdersCollection(autoIndex: `off` | `eager` = `eager`) { + return createCollection( + mockSyncCollectionOptions({ + id: `test-orders-collect`, + getKey: (order) => order.id, + initialData: sampleOrders, + autoIndex, + }), + ) + } + + beforeEach(() => { + ordersCollection = createOrdersCollection() + }) + + test(`collect gathers all values into an array`, () => { + const ordersByCustomer = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => orders.customer_id) + .select(({ orders }) => ({ + customer_id: orders.customer_id, + order_ids: collect(orders.id), + amounts: collect(orders.amount), + })), + }) + + expect(ordersByCustomer.size).toBe(3) // 3 customers + + // Customer 1: orders 1, 2, 7 (amounts: 100, 200, 400) + const customer1 = ordersByCustomer.get(1) + expect(customer1).toBeDefined() + expect(customer1?.order_ids).toHaveLength(3) + expect(customer1?.order_ids).toEqual(expect.arrayContaining([1, 2, 7])) + expect(customer1?.amounts).toHaveLength(3) + expect(customer1?.amounts).toEqual(expect.arrayContaining([100, 200, 400])) + + // Customer 2: orders 3, 4 (amounts: 150, 300) + const customer2 = ordersByCustomer.get(2) + expect(customer2).toBeDefined() + expect(customer2?.order_ids).toHaveLength(2) + expect(customer2?.order_ids).toEqual(expect.arrayContaining([3, 4])) + expect(customer2?.amounts).toEqual(expect.arrayContaining([150, 300])) + + // Customer 3: orders 5, 6 (amounts: 250, 75) + const customer3 = ordersByCustomer.get(3) + expect(customer3).toBeDefined() + expect(customer3?.order_ids).toHaveLength(2) + 
expect(customer3?.order_ids).toEqual(expect.arrayContaining([5, 6])) + expect(customer3?.amounts).toEqual(expect.arrayContaining([250, 75])) + }) + + test(`collect works with string values`, () => { + const statusesByCustomer = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => orders.customer_id) + .select(({ orders }) => ({ + customer_id: orders.customer_id, + statuses: collect(orders.status), + })), + }) + + // Customer 1: all completed orders + const customer1 = statusesByCustomer.get(1) + expect(customer1?.statuses).toHaveLength(3) + expect(customer1?.statuses.every((s) => s === `completed`)).toBe(true) + + // Customer 3: pending and cancelled + const customer3 = statusesByCustomer.get(3) + expect(customer3?.statuses).toHaveLength(2) + expect(customer3?.statuses).toEqual( + expect.arrayContaining([`pending`, `cancelled`]), + ) + }) + + test(`collect combined with other aggregates`, () => { + const customerStats = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => orders.customer_id) + .select(({ orders }) => ({ + customer_id: orders.customer_id, + order_ids: collect(orders.id), + order_count: count(orders.id), + total_amount: sum(orders.amount), + amounts: collect(orders.amount), + })), + }) + + const customer1 = customerStats.get(1) + expect(customer1?.order_ids).toHaveLength(3) + expect(customer1?.order_count).toBe(3) + expect(customer1?.total_amount).toBe(700) + expect(customer1?.amounts).toHaveLength(3) + }) + + test(`collect with live updates - insert`, () => { + const ordersByCustomer = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => orders.customer_id) + .select(({ orders }) => ({ + customer_id: orders.customer_id, + order_ids: collect(orders.id), + })), + }) + + // Initial state + const initialCustomer1 = 
ordersByCustomer.get(1) + expect(initialCustomer1?.order_ids).toHaveLength(3) + + // Insert new order for customer 1 + const newOrder: Order = { + id: 8, + customer_id: 1, + amount: 500, + status: `completed`, + date: new Date(`2023-03-15`), + product_category: `electronics`, + quantity: 2, + discount: 0, + sales_rep_id: 1, + } + + ordersCollection.utils.begin() + ordersCollection.utils.write({ type: `insert`, value: newOrder }) + ordersCollection.utils.commit() + + const updatedCustomer1 = ordersByCustomer.get(1) + expect(updatedCustomer1?.order_ids).toHaveLength(4) + expect(updatedCustomer1?.order_ids).toEqual( + expect.arrayContaining([1, 2, 7, 8]), + ) + }) + + test(`collect with live updates - delete`, () => { + const ordersByCustomer = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => orders.customer_id) + .select(({ orders }) => ({ + customer_id: orders.customer_id, + order_ids: collect(orders.id), + })), + }) + + // Initial state + const initialCustomer3 = ordersByCustomer.get(3) + expect(initialCustomer3?.order_ids).toHaveLength(2) + expect(initialCustomer3?.order_ids).toEqual(expect.arrayContaining([5, 6])) + + // Delete order 6 + const orderToDelete = sampleOrders.find((o) => o.id === 6)! 
+ + ordersCollection.utils.begin() + ordersCollection.utils.write({ type: `delete`, value: orderToDelete }) + ordersCollection.utils.commit() + + const updatedCustomer3 = ordersByCustomer.get(3) + expect(updatedCustomer3?.order_ids).toHaveLength(1) + expect(updatedCustomer3?.order_ids).toEqual([5]) + }) + + test(`collect with WHERE filter`, () => { + const completedOrdersByCustomer = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .where(({ orders }) => eq(orders.status, `completed`)) + .groupBy(({ orders }) => orders.customer_id) + .select(({ orders }) => ({ + customer_id: orders.customer_id, + order_ids: collect(orders.id), + })), + }) + + // Customer 1: all 3 orders are completed + const customer1 = completedOrdersByCustomer.get(1) + expect(customer1?.order_ids).toHaveLength(3) + + // Customer 2: only order 4 is completed + const customer2 = completedOrdersByCustomer.get(2) + expect(customer2?.order_ids).toHaveLength(1) + expect(customer2?.order_ids).toEqual([4]) + + // Customer 3: no completed orders + const customer3 = completedOrdersByCustomer.get(3) + expect(customer3).toBeUndefined() + }) + + test(`collect with multiple column grouping`, () => { + const ordersByStatusAndCategory = createLiveQueryCollection({ + startSync: true, + query: (q) => + q + .from({ orders: ordersCollection }) + .groupBy(({ orders }) => [orders.status, orders.product_category]) + .select(({ orders }) => ({ + status: orders.status, + product_category: orders.product_category, + order_ids: collect(orders.id), + })), + }) + + // Completed electronics: orders 1, 2, 4 + const completedElectronics = ordersByStatusAndCategory.get( + `["completed","electronics"]`, + ) + expect(completedElectronics?.order_ids).toHaveLength(3) + expect(completedElectronics?.order_ids).toEqual( + expect.arrayContaining([1, 2, 4]), + ) + + // Completed books: order 7 + const completedBooks = ordersByStatusAndCategory.get( + `["completed","books"]`, + ) + 
expect(completedBooks?.order_ids).toHaveLength(1) + expect(completedBooks?.order_ids).toEqual([7]) + }) +}) diff --git a/packages/durable-stream-db-collection/package.json b/packages/durable-stream-db-collection/package.json index a41abb238..a45732900 100644 --- a/packages/durable-stream-db-collection/package.json +++ b/packages/durable-stream-db-collection/package.json @@ -7,7 +7,7 @@ "@tanstack/db": "workspace:*" }, "peerDependencies": { - "@durable-streams/client": ">=0.1.0" + "@durable-streams/client": "workspace:*" }, "peerDependenciesMeta": { "@durable-streams/client": { From dcfe42fef3046126bf24a63981481e6847649fba Mon Sep 17 00:00:00 2001 From: James Arthur Date: Sun, 14 Dec 2025 13:09:15 -0800 Subject: [PATCH 3/5] add minStr and maxStr functions --- packages/db/src/query/builder/functions.ts | 24 ++++++++++++++++++++++ packages/db/src/query/compiler/group-by.ts | 4 ++++ packages/db/src/query/index.ts | 2 ++ 3 files changed, 30 insertions(+) diff --git a/packages/db/src/query/builder/functions.ts b/packages/db/src/query/builder/functions.ts index bc9cadc3a..11593b6b5 100644 --- a/packages/db/src/query/builder/functions.ts +++ b/packages/db/src/query/builder/functions.ts @@ -324,6 +324,28 @@ export function max(arg: T): AggregateReturnType { return new Aggregate(`max`, [toExpression(arg)]) as AggregateReturnType } +/** + * String-typed min aggregate. + * Unlike min() which coerces to number, minStr() preserves string values + * for proper lexicographic comparison (e.g., ISO 8601 date strings). + */ +export function minStr( + arg: T +): Aggregate { + return new Aggregate(`minstr`, [toExpression(arg)]) +} + +/** + * String-typed max aggregate. + * Unlike max() which coerces to number, maxStr() preserves string values + * for proper lexicographic comparison (e.g., ISO 8601 date strings). 
+ */ +export function maxStr( + arg: T +): Aggregate { + return new Aggregate(`maxstr`, [toExpression(arg)]) +} + /** * Collects all values in a group into an array * Similar to SQL's array_agg or GROUP_CONCAT @@ -383,6 +405,8 @@ export const operators = [ `sum`, `min`, `max`, + `minStr`, + `maxStr`, `collect`, ] as const diff --git a/packages/db/src/query/compiler/group-by.ts b/packages/db/src/query/compiler/group-by.ts index f6f6ee253..9fe034ee7 100644 --- a/packages/db/src/query/compiler/group-by.ts +++ b/packages/db/src/query/compiler/group-by.ts @@ -379,6 +379,10 @@ function getAggregateFunction(aggExpr: Aggregate) { return min(valueExtractorWithDate) case `max`: return max(valueExtractorWithDate) + case `minstr`: + return min(rawValueExtractor) + case `maxstr`: + return max(rawValueExtractor) case `collect`: return collect(rawValueExtractor) default: diff --git a/packages/db/src/query/index.ts b/packages/db/src/query/index.ts index 63905caf1..445a071a9 100644 --- a/packages/db/src/query/index.ts +++ b/packages/db/src/query/index.ts @@ -41,6 +41,8 @@ export { sum, min, max, + minStr, + maxStr, collect, } from './builder/functions.js' From 71afbce305c5a5dc0229ab7cb7ef688c1ededc3f Mon Sep 17 00:00:00 2001 From: James Arthur Date: Sun, 14 Dec 2025 13:10:27 -0800 Subject: [PATCH 4/5] working durable stream collection. 
--- .../src/collection.ts | 32 +- .../src/durable-streams-client.d.ts | 90 -- .../durable-stream-db-collection/src/types.ts | 8 +- .../tsconfig.json | 3 +- pnpm-lock.yaml | 964 +++++++++++++++++- pnpm-workspace.yaml | 1 + 6 files changed, 978 insertions(+), 120 deletions(-) delete mode 100644 packages/durable-stream-db-collection/src/durable-streams-client.d.ts diff --git a/packages/durable-stream-db-collection/src/collection.ts b/packages/durable-stream-db-collection/src/collection.ts index e5b0ecd4c..9ad793798 100644 --- a/packages/durable-stream-db-collection/src/collection.ts +++ b/packages/durable-stream-db-collection/src/collection.ts @@ -96,6 +96,7 @@ export function durableStreamCollectionOptions( const stream = new DurableStream({ url: config.url, headers: config.headers, + signal: config.signal, }) try { @@ -104,13 +105,30 @@ export function durableStreamCollectionOptions( live: config.liveMode ?? `long-poll`, } - for await (const result of stream.follow( - followOptions, - ) as AsyncIterable>) { + for await (const chunk of stream.read(followOptions)) { if (aborted) break - // In JSON mode, result.data is the parsed array - const rows = result.data + // Parse JSON from raw bytes + // The stream returns Uint8Array, we need to decode and parse + let rows: Array + try { + const text = new TextDecoder().decode(chunk.data) + if (!text.trim()) { + // Empty response, skip + continue + } + const parsed = JSON.parse(text) + // Server may return array directly or wrapped in an object + rows = Array.isArray(parsed) ? 
parsed : [parsed] + } catch { + // Skip malformed JSON + continue + } + + const result: DurableStreamResult = { + data: rows, + offset: chunk.offset, + } // Only start a transaction if we have rows to process if (rows.length > 0) { @@ -146,6 +164,7 @@ export function durableStreamCollectionOptions( // Mark ready after first successful batch if (isFirstBatch) { markReady() + isFirstBatch = false } } @@ -160,6 +179,7 @@ export function durableStreamCollectionOptions( // Reconnect after delay if not aborted if (!aborted) { const delay = config.reconnectDelay ?? 5000 + setTimeout(syncLoop, delay) } } @@ -179,7 +199,7 @@ export function durableStreamCollectionOptions( // Create the getKey function that extracts from RowWithOffset const getKey = (row: RowWithOffset): string | number => { // Extract the original row (without offset) for the user's getKey function - // eslint-disable-next-line @typescript-eslint/no-unused-vars + const { offset: _offset, ...originalRow } = row return config.getKey(originalRow as TRow) } diff --git a/packages/durable-stream-db-collection/src/durable-streams-client.d.ts b/packages/durable-stream-db-collection/src/durable-streams-client.d.ts deleted file mode 100644 index 80538ec62..000000000 --- a/packages/durable-stream-db-collection/src/durable-streams-client.d.ts +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Type declarations for @durable-streams/client - * - * This module provides client types for the Durable Streams protocol. - * See: https://github.com/durable-streams/durable-streams - */ - -declare module '@durable-streams/client' { - export interface DurableStreamOptions { - /** - * URL of the Durable Stream endpoint. - */ - url: string - - /** - * HTTP headers to include in requests. - */ - headers?: Record - } - - export interface FollowOptions { - /** - * The offset to start reading from. - * Use '-1' to read from the beginning. - */ - offset: string - - /** - * Live mode for following the stream. 
- * - 'long-poll': HTTP long-polling (default) - * - 'sse': Server-Sent Events - */ - live?: 'long-poll' | 'sse' - } - - export interface StreamResult { - /** - * The data from this batch. - * In JSON mode, this is an array of parsed JSON objects. - */ - data: TData - - /** - * The Stream-Next-Offset for this batch. - * Use this offset to resume from this point. - */ - offset: string - } - - export interface ReadOptions { - /** - * The offset to start reading from. - */ - offset?: string - } - - export interface ReadResult extends StreamResult {} - - /** - * Durable Streams client for reading from a Durable Stream. - * - * @example - * ```typescript - * const stream = new DurableStream({ url: 'https://api.example.com/v1/stream/events' }) - * - * // Read from a specific offset - * const result = await stream.read({ offset: '-1' }) - * console.log(result.data, result.offset) - * - * // Follow the stream live - * for await (const result of stream.follow({ offset: '-1', live: 'long-poll' })) { - * console.log(result.data, result.offset) - * } - * ``` - */ - export class DurableStream { - constructor(options: DurableStreamOptions) - - /** - * Read data from the stream starting at the given offset. - */ - read(options?: ReadOptions): Promise> - - /** - * Follow the stream from a given offset, yielding results as they arrive. - * This is an async iterator that yields results continuously. - */ - follow(options: FollowOptions): AsyncIterable> - } -} diff --git a/packages/durable-stream-db-collection/src/types.ts b/packages/durable-stream-db-collection/src/types.ts index a25f92330..cabe90c08 100644 --- a/packages/durable-stream-db-collection/src/types.ts +++ b/packages/durable-stream-db-collection/src/types.ts @@ -105,6 +105,12 @@ export interface DurableStreamCollectionConfig { * Defaults to localStorage in browsers. */ storage?: OffsetStorage + + /** + * AbortSignal to cancel the stream sync. + * When aborted, the sync will stop and cleanup will be called. 
+ */ + signal?: AbortSignal } /** @@ -154,7 +160,7 @@ export interface DurableStreamClient { /** * Follow the stream from a given offset, yielding results as they arrive. */ - follow(options: FollowOptions): AsyncIterable> + follow: (options: FollowOptions) => AsyncIterable> } /** diff --git a/packages/durable-stream-db-collection/tsconfig.json b/packages/durable-stream-db-collection/tsconfig.json index ad23edbc0..eb61958ff 100644 --- a/packages/durable-stream-db-collection/tsconfig.json +++ b/packages/durable-stream-db-collection/tsconfig.json @@ -12,7 +12,8 @@ "forceConsistentCasingInFileNames": true, "paths": { "@tanstack/db-ivm": ["../db-ivm/src"], - "@tanstack/db": ["../db/src"] + "@tanstack/db": ["../db/src"], + "@durable-streams/client": ["../../../../durable-streams/durable-streams/packages/client/src"] } }, "include": ["src", "tests", "vite.config.ts"], diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c6a532eb9..22b8528f9 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -105,6 +105,173 @@ importers: specifier: ^3.25.76 version: 3.25.76 + ../../durable-streams/durable-streams/packages/benchmarks: + dependencies: + '@durable-streams/writer': + specifier: workspace:* + version: link:../writer + vitest: + specifier: ^3.2.4 + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.3)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.2.0(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + devDependencies: + '@types/node': + specifier: ^22.15.21 + version: 22.19.3 + tsdown: + specifier: ^0.9.0 + version: 0.9.9(publint@0.3.15)(typescript@5.9.3) + typescript: + specifier: ^5.0.0 + version: 5.9.3 + + ../../durable-streams/durable-streams/packages/cli: + dependencies: + '@durable-streams/writer': + specifier: workspace:* + version: link:../writer + devDependencies: + '@durable-streams/server': + specifier: workspace:* + version: link:../server + '@types/node': + specifier: ^22.15.21 + version: 22.19.3 + tsdown: + specifier: ^0.9.0 + 
version: 0.9.9(publint@0.3.15)(typescript@5.9.3) + tsx: + specifier: ^4.19.2 + version: 4.21.0 + typescript: + specifier: ^5.5.2 + version: 5.9.3 + + ../../durable-streams/durable-streams/packages/client: + dependencies: + '@microsoft/fetch-event-source': + specifier: ^2.0.1 + version: 2.0.1 + devDependencies: + '@durable-streams/server': + specifier: workspace:* + version: link:../server + tsdown: + specifier: ^0.9.0 + version: 0.9.9(publint@0.3.15)(typescript@5.9.3) + + ../../durable-streams/durable-streams/packages/conformance-tests: + dependencies: + '@durable-streams/client': + specifier: workspace:* + version: link:../client + '@durable-streams/writer': + specifier: workspace:* + version: link:../writer + vitest: + specifier: ^3.2.4 + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.2.0(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + devDependencies: + tsdown: + specifier: ^0.9.0 + version: 0.9.9(publint@0.3.15)(typescript@5.9.3) + typescript: + specifier: ^5.0.0 + version: 5.9.3 + + ../../durable-streams/durable-streams/packages/server: + dependencies: + '@durable-streams/writer': + specifier: workspace:* + version: link:../writer + '@neophi/sieve-cache': + specifier: ^1.0.0 + version: 1.5.0 + lmdb: + specifier: ^3.3.0 + version: 3.4.2 + devDependencies: + '@durable-streams/conformance-tests': + specifier: workspace:* + version: link:../conformance-tests + '@types/node': + specifier: ^22.0.0 + version: 22.19.3 + tsdown: + specifier: ^0.9.0 + version: 0.9.9(publint@0.3.15)(typescript@5.9.3) + typescript: + specifier: ^5.0.0 + version: 5.9.3 + vitest: + specifier: ^3.2.4 + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.3)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.2.0(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + + ../../durable-streams/durable-streams/packages/test-ui: + dependencies: + '@durable-streams/client': + 
specifier: workspace:^ + version: link:../client + '@durable-streams/writer': + specifier: workspace:^ + version: link:../writer + '@tanstack/react-router': + specifier: ^1.139.14 + version: 1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1) + '@tanstack/router-devtools': + specifier: ^1.139.15 + version: 1.141.2(@tanstack/react-router@1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1))(@tanstack/router-core@1.140.0)(csstype@3.2.3)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(solid-js@1.9.10) + '@tanstack/router-plugin': + specifier: ^1.139.14 + version: 1.140.0(@tanstack/react-router@1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.10)(vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1)))(vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1)) + react: + specifier: ^19.2.1 + version: 19.2.1 + react-dom: + specifier: ^19.2.1 + version: 19.2.1(react@19.2.1) + devDependencies: + '@types/react': + specifier: ^19.2.7 + version: 19.2.7 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.7) + '@vitejs/plugin-react': + specifier: ^5.1.2 + version: 5.1.2(vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1)) + typescript: + specifier: ~5.9.3 + version: 5.9.3 + vite: + specifier: ^7.2.4 + version: 7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + + ../../durable-streams/durable-streams/packages/wrapper-sdk: + dependencies: + '@durable-streams/client': + specifier: workspace:* + version: link:../client + '@durable-streams/writer': + specifier: workspace:* + version: link:../writer + devDependencies: + '@durable-streams/server': + specifier: workspace:* + version: link:../server + + 
../../durable-streams/durable-streams/packages/writer: + dependencies: + '@durable-streams/client': + specifier: workspace:* + version: link:../client + fastq: + specifier: ^1.19.1 + version: 1.19.1 + devDependencies: + '@durable-streams/server': + specifier: workspace:* + version: link:../server + examples/angular/todos: dependencies: '@angular/common': @@ -766,6 +933,25 @@ importers: specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4) + packages/durable-stream-db-collection: + dependencies: + '@durable-streams/client': + specifier: workspace:* + version: link:../../../../durable-streams/durable-streams/packages/client + '@standard-schema/spec': + specifier: ^1.0.0 + version: 1.0.0 + '@tanstack/db': + specifier: workspace:* + version: link:../db + devDependencies: + '@vitest/coverage-istanbul': + specifier: ^3.2.4 + version: 3.2.4(vitest@3.2.4) + zod: + specifier: ^3.23.0 + version: 3.25.76 + packages/electric-db-collection: dependencies: '@electric-sql/client': @@ -2847,6 +3033,10 @@ packages: '@napi-rs/wasm-runtime@1.0.7': resolution: {integrity: sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==} + '@neophi/sieve-cache@1.5.0': + resolution: {integrity: sha512-9T3nD5q51X1d4QYW6vouKW9hBSb2Tb/wB/2XoTr4oP5SCGtp3a7aTHHewQFylred1B21/Bhev6gy4x01FPBcbQ==} + engines: {node: '>=18'} + '@noble/ciphers@2.0.1': resolution: {integrity: sha512-xHK3XHPUW8DTAobU+G0XT+/w+JLM7/8k1UFdB5xg/zTFPnFCobhftzw8wl4Lw2aq/Rvir5pxfZV5fEazmeCJ2g==} engines: {node: '>= 20.19.0'} @@ -2927,6 +3117,9 @@ packages: resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} engines: {node: '>=8.0.0'} + '@oxc-project/types@0.66.0': + resolution: {integrity: sha512-KF5Wlo2KzQ+jmuCtrGISZoUfdHom7qHavNfPLW2KkeYJfYMGwtiia8KjwtsvNJ49qRiXImOCkPeVPd4bMlbR7w==} + '@oxc-resolver/binding-android-arm-eabi@11.13.2': resolution: {integrity: 
sha512-vWd1NEaclg/t2DtEmYzRRBNQOueMI8tixw/fSNZ9XETXLRJiAjQMYpYeflQdRASloGze6ZelHE/wIBNt4S+pkw==} cpu: [arm] @@ -2942,21 +3135,41 @@ packages: cpu: [arm64] os: [darwin] + '@oxc-resolver/binding-darwin-arm64@9.0.2': + resolution: {integrity: sha512-MVyRgP2gzJJtAowjG/cHN3VQXwNLWnY+FpOEsyvDepJki1SdAX/8XDijM1yN6ESD1kr9uhBKjGelC6h3qtT+rA==} + cpu: [arm64] + os: [darwin] + '@oxc-resolver/binding-darwin-x64@11.13.2': resolution: {integrity: sha512-tDcyWtkUzkt6auJLP2dOjL84BxqHkKW4mz2lNRIGPTq7b+HBraB+m8RdRH6BgqTvbnNECOxR3XAMaKBKC8J51g==} cpu: [x64] os: [darwin] + '@oxc-resolver/binding-darwin-x64@9.0.2': + resolution: {integrity: sha512-7kV0EOFEZ3sk5Hjy4+bfA6XOQpCwbDiDkkHN4BHHyrBHsXxUR05EcEJPPL1WjItefg+9+8hrBmoK0xRoDs41+A==} + cpu: [x64] + os: [darwin] + '@oxc-resolver/binding-freebsd-x64@11.13.2': resolution: {integrity: sha512-fpaeN8Q0kWvKns9uSMg6CcKo7cdgmWt6J91stPf8sdM+EKXzZ0YcRnWWyWF8SM16QcLUPCy5Iwt5Z8aYBGaZYA==} cpu: [x64] os: [freebsd] + '@oxc-resolver/binding-freebsd-x64@9.0.2': + resolution: {integrity: sha512-6OvkEtRXrt8sJ4aVfxHRikjain9nV1clIsWtJ1J3J8NG1ZhjyJFgT00SCvqxbK+pzeWJq6XzHyTCN78ML+lY2w==} + cpu: [x64] + os: [freebsd] + '@oxc-resolver/binding-linux-arm-gnueabihf@11.13.2': resolution: {integrity: sha512-idBgJU5AvSsGOeaIWiFBKbNBjpuduHsJmrG4CBbEUNW/Ykx+ISzcuj1PHayiYX6R9stVsRhj3d2PyymfC5KWRg==} cpu: [arm] os: [linux] + '@oxc-resolver/binding-linux-arm-gnueabihf@9.0.2': + resolution: {integrity: sha512-aYpNL6o5IRAUIdoweW21TyLt54Hy/ZS9tvzNzF6ya1ckOQ8DLaGVPjGpmzxdNja9j/bbV6aIzBH7lNcBtiOTkQ==} + cpu: [arm] + os: [linux] + '@oxc-resolver/binding-linux-arm-musleabihf@11.13.2': resolution: {integrity: sha512-BlBvQUhvvIM/7s96KlKhMk0duR2sj8T7Hyii46/5QnwfN/pHwobvOL5czZ6/SKrHNB/F/qDY4hGsBuB1y7xgTg==} cpu: [arm] @@ -2967,11 +3180,21 @@ packages: cpu: [arm64] os: [linux] + '@oxc-resolver/binding-linux-arm64-gnu@9.0.2': + resolution: {integrity: sha512-RGFW4vCfKMFEIzb9VCY0oWyyY9tR1/o+wDdNePhiUXZU4SVniRPQaZ1SJ0sUFI1k25pXZmzQmIP6cBmazi/Dew==} + cpu: [arm64] + os: [linux] + 
'@oxc-resolver/binding-linux-arm64-musl@11.13.2': resolution: {integrity: sha512-dkGzOxo+I9lA4Er6qzFgkFevl3JvwyI9i0T/PkOJHva04rb1p9dz8GPogTO9uMK4lrwLWzm/piAu+tHYC7v7+w==} cpu: [arm64] os: [linux] + '@oxc-resolver/binding-linux-arm64-musl@9.0.2': + resolution: {integrity: sha512-lxx/PibBfzqYvut2Y8N2D0Ritg9H8pKO+7NUSJb9YjR/bfk2KRmP8iaUz3zB0JhPtf/W3REs65oKpWxgflGToA==} + cpu: [arm64] + os: [linux] + '@oxc-resolver/binding-linux-ppc64-gnu@11.13.2': resolution: {integrity: sha512-53kWsjLkVFnoSA7COdps38pBssN48zI8LfsOvupsmQ0/4VeMYb+0Ao9O6r52PtmFZsGB3S1Qjqbjl/Pswj1a3g==} cpu: [ppc64] @@ -2982,6 +3205,11 @@ packages: cpu: [riscv64] os: [linux] + '@oxc-resolver/binding-linux-riscv64-gnu@9.0.2': + resolution: {integrity: sha512-yD28ptS/OuNhwkpXRPNf+/FvrO7lwURLsEbRVcL1kIE0GxNJNMtKgIE4xQvtKDzkhk6ZRpLho5VSrkkF+3ARTQ==} + cpu: [riscv64] + os: [linux] + '@oxc-resolver/binding-linux-riscv64-musl@11.13.2': resolution: {integrity: sha512-WXrm4YiRU0ijqb72WHSjmfYaQZ7t6/kkQrFc4JtU+pUE4DZA/DEdxOuQEd4Q43VqmLvICTJWSaZMlCGQ4PSRUg==} cpu: [riscv64] @@ -2992,26 +3220,51 @@ packages: cpu: [s390x] os: [linux] + '@oxc-resolver/binding-linux-s390x-gnu@9.0.2': + resolution: {integrity: sha512-WBwEJdspoga2w+aly6JVZeHnxuPVuztw3fPfWrei2P6rNM5hcKxBGWKKT6zO1fPMCB4sdDkFohGKkMHVV1eryQ==} + cpu: [s390x] + os: [linux] + '@oxc-resolver/binding-linux-x64-gnu@11.13.2': resolution: {integrity: sha512-DVo6jS8n73yNAmCsUOOk2vBeC60j2RauDXQM8p7RDl0afsEaA2le22vD8tky7iNoM5tsxfBmE4sOJXEKgpwWRw==} cpu: [x64] os: [linux] + '@oxc-resolver/binding-linux-x64-gnu@9.0.2': + resolution: {integrity: sha512-a2z3/cbOOTUq0UTBG8f3EO/usFcdwwXnCejfXv42HmV/G8GjrT4fp5+5mVDoMByH3Ce3iVPxj1LmS6OvItKMYQ==} + cpu: [x64] + os: [linux] + '@oxc-resolver/binding-linux-x64-musl@11.13.2': resolution: {integrity: sha512-6WqrE+hQBFP35KdwQjWcZpldbTq6yJmuTVThISu+rY3+j6MaDp2ciLHTr1X68r2H/7ocOIl4k3NnOVIzeRJE3w==} cpu: [x64] os: [linux] + '@oxc-resolver/binding-linux-x64-musl@9.0.2': + resolution: {integrity: 
sha512-bHZF+WShYQWpuswB9fyxcgMIWVk4sZQT0wnwpnZgQuvGTZLkYJ1JTCXJMtaX5mIFHf69ngvawnwPIUA4Feil0g==} + cpu: [x64] + os: [linux] + '@oxc-resolver/binding-wasm32-wasi@11.13.2': resolution: {integrity: sha512-YpxvQmP2D+mNUkLQZbBjGz20g/pY8XoOBdPPoWMl9X68liFFjXxkPQTrZxWw4zzG/UkTM5z6dPRTyTePRsMcjw==} engines: {node: '>=14.0.0'} cpu: [wasm32] + '@oxc-resolver/binding-wasm32-wasi@9.0.2': + resolution: {integrity: sha512-I5cSgCCh5nFozGSHz+PjIOfrqW99eUszlxKLgoNNzQ1xQ2ou9ZJGzcZ94BHsM9SpyYHLtgHljmOZxCT9bgxYNA==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + '@oxc-resolver/binding-win32-arm64-msvc@11.13.2': resolution: {integrity: sha512-1SKBw6KcCmvPBdEw1/Qdpv6eSDf23lCXTWz9VxTe6QUQ/1wR+HZR2uS4q6C8W6jnIswMTQbxpTvVwdRXl+ufeA==} cpu: [arm64] os: [win32] + '@oxc-resolver/binding-win32-arm64-msvc@9.0.2': + resolution: {integrity: sha512-5IhoOpPr38YWDWRCA5kP30xlUxbIJyLAEsAK7EMyUgqygBHEYLkElaKGgS0X5jRXUQ6l5yNxuW73caogb2FYaw==} + cpu: [arm64] + os: [win32] + '@oxc-resolver/binding-win32-ia32-msvc@11.13.2': resolution: {integrity: sha512-KEVV7wggDucxRn3vvyHnmTCPXoCT7vWpH18UVLTygibHJvNRP2zl5lBaQcCIdIaYYZjKt1aGI/yZqxZvHoiCdg==} cpu: [ia32] @@ -3022,6 +3275,70 @@ packages: cpu: [x64] os: [win32] + '@oxc-resolver/binding-win32-x64-msvc@9.0.2': + resolution: {integrity: sha512-Qc40GDkaad9rZksSQr2l/V9UubigIHsW69g94Gswc2sKYB3XfJXfIfyV8WTJ67u6ZMXsZ7BH1msSC6Aen75mCg==} + cpu: [x64] + os: [win32] + + '@oxc-transform/binding-darwin-arm64@0.67.0': + resolution: {integrity: sha512-P3zBMhpOQceNSys3/ZqvrjuRvcIbVzfGFN/tH34HlVkOjOmfGK1mOWjORsGAZtbgh1muXrF6mQETLzFjfYndXQ==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [darwin] + + '@oxc-transform/binding-darwin-x64@0.67.0': + resolution: {integrity: sha512-B52aeo/C3spYHcwFQ4nAbDkwbMKf0K6ncWM8GrVUgGu8PPECLBhjPCW11kPW/lt9FxwrdgVYVzPYlZ6wmJmpEA==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [darwin] + + '@oxc-transform/binding-linux-arm-gnueabihf@0.67.0': + resolution: {integrity: 
sha512-5Ir1eQrC9lvj/rR1TJVGwOR4yLgXTLmfKHIfpVH7GGSQrzK7VMUfHWX+dAsX1VutaeE8puXIqtYvf9cHLw78dw==} + engines: {node: '>=14.0.0'} + cpu: [arm] + os: [linux] + + '@oxc-transform/binding-linux-arm64-gnu@0.67.0': + resolution: {integrity: sha512-zTqfPET5+hZfJ3/dMqJboKxrpXMXk+j2HVdvX0wVhW2MI7n7hwELl+In6Yu20nXuEyJkNQlWHbNPCUfpM+cBWw==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [linux] + + '@oxc-transform/binding-linux-arm64-musl@0.67.0': + resolution: {integrity: sha512-jzz/ATUhZ8wetb4gm5GwzheZns3Qj1CZ+DIMmD8nBxQXszmTS/fqnAPpgzruyLqkXBUuUfF3pHv5f/UmuHReuQ==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [linux] + + '@oxc-transform/binding-linux-x64-gnu@0.67.0': + resolution: {integrity: sha512-Qy2+tfglJ8yX6guC1EDAnuuzRZIXciXO9UwOewxyiahLxwuTpj/wvvZN3Cb1SA3c14zrwb2TNMZvaXS1/OS5Pg==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [linux] + + '@oxc-transform/binding-linux-x64-musl@0.67.0': + resolution: {integrity: sha512-tHoYgDIRhgvh+/wIrzAk3cUoj/LSSoJAdsZW9XRlaixFW/TF2puxRyaS1hRco0bcKTwotXl/eDYqZmhIfUyGRQ==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [linux] + + '@oxc-transform/binding-wasm32-wasi@0.67.0': + resolution: {integrity: sha512-ZPT+1HECf7WUnotodIuS8tvSkwaiCdC2DDw8HVRmlerbS6iPYIPKyBCvkSM4RyUx0kljZtB9AciLCkEbwy5/zA==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@oxc-transform/binding-win32-arm64-msvc@0.67.0': + resolution: {integrity: sha512-+E3lOHCk4EuIk6IjshBAARknAUpgH+gHTtZxCPqK4AWYA+Tls2J6C0FVM48uZ4m3rZpAq8ZszM9JZVAkOaynBQ==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [win32] + + '@oxc-transform/binding-win32-x64-msvc@0.67.0': + resolution: {integrity: sha512-3pIIFb9g5aFrAODTQVJYitq+ONHgDJ4IYk/7pk+jsG6JpKUkURd0auUlxvriO11fFit5hdwy+wIbU4kBvyRUkg==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [win32] + '@parcel/watcher-android-arm64@2.5.1': resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} engines: {node: '>= 10.0.0'} @@ -3201,6 +3518,69 @@ packages: 
resolution: {integrity: sha512-S+9ANAvUmjutrshV4jZjaiG8XQyuJIZ8a4utWmN/vW1sgQ9IfBnPndwkmQYw53QmouOIytT874u65HEmu6H5jw==} engines: {node: '>=18'} + '@quansync/fs@1.0.0': + resolution: {integrity: sha512-4TJ3DFtlf1L5LDMaM6CanJ/0lckGNtJcMjQ1NAV6zDmA0tEHKZtxNKin8EgPaVX1YzljbxckyT2tJrpQKAtngQ==} + + '@rolldown/binding-darwin-arm64@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-2F4bhDtV6CHBx7JMiT9xvmxkcZLHFmonfbli36RyfvgThDOAu92bis28zDTdguDY85lN/jBRKX/eOvX+T5hMkg==} + cpu: [arm64] + os: [darwin] + + '@rolldown/binding-darwin-x64@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-8VMChhFLeD/oOAQUspFtxZaV7ctDob63w626kwvBBIHtlpY2Ohw4rsfjjtGckyrTCI/RROgZv/TVVEsG3GkgLw==} + cpu: [x64] + os: [darwin] + + '@rolldown/binding-freebsd-x64@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-4W28EgaIidbWIpwB3hESMBfiOSs7LBFpJGa8JIV488qLEnTR/pqzxDEoOPobhRSJ1lJlv0vUgA8+DKBIldo2gw==} + cpu: [x64] + os: [freebsd] + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-1ECtyzIKlAHikR7BhS4hk7Hxw8xCH6W3S+Sb74EM0vy5AqPvWSbgLfAwagYC7gNDcMMby3I757X7qih5fIrGiw==} + cpu: [arm] + os: [linux] + + '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-wU1kp8qPRUKC8N82dNs3F5+UyKRww9TUEO5dQ5mxCb0cG+y4l5rVaXpMgvL0VuQahPVvTMs577QPhJGb4iDONw==} + cpu: [arm64] + os: [linux] + + '@rolldown/binding-linux-arm64-musl@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-odDjO2UtEEMAzwmLHEOKylJjQa+em1REAO9H19PA+O+lPu6evVbre5bqu8qCjEtHG1Q034LpZR86imCP2arb/w==} + cpu: [arm64] + os: [linux] + + '@rolldown/binding-linux-x64-gnu@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-Ty2T67t2Oj1lg417ATRENxdk8Jkkksc/YQdCJyvkGqteHe60pSU2GGP/tLWGB+I0Ox+u387bzU/SmfmrHZk9aw==} + cpu: [x64] + os: [linux] + + '@rolldown/binding-linux-x64-musl@1.0.0-beta.8-commit.151352b': + resolution: {integrity: 
sha512-Fm1TxyeVE+gy74HM26CwbEOUndIoWAMgWkVDxYBD64tayvp5JvltpGHaqCg6x5i+X2F5XCDCItqwVlC7/mTxIw==} + cpu: [x64] + os: [linux] + + '@rolldown/binding-wasm32-wasi@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-AEZzTyGerfkffXmtv7kFJbHWkryNeolk0Br+yhH1wZyN6Tt6aebqICDL8KNRO2iExoEWzyYS6dPxh0QmvNTfUQ==} + engines: {node: '>=14.21.3'} + cpu: [wasm32] + + '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-0lskDFKQwf5PMjl17qHAroU6oVU0Zn8NbAH/PdM9QB1emOzyFDGa20d4kESGeo3Uq7xOKXcTORJV/JwKIBORqw==} + cpu: [arm64] + os: [win32] + + '@rolldown/binding-win32-ia32-msvc@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-DfG1S0zGKnUfr95cNCmR4YPiZ/moS7Tob5eV+9r5JGeHZVWFHWwvJdR0jArj6Ty0LbBFDTVVB3iAvqRSji+l0Q==} + cpu: [ia32] + os: [win32] + + '@rolldown/binding-win32-x64-msvc@1.0.0-beta.8-commit.151352b': + resolution: {integrity: sha512-5HZEtc8U2I1O903hXBynWtWaf+qzAFj66h5B7gOtVcvqIk+lKRVSupA85OdIvR7emrsYU25ikpfiU5Jhg9kTbQ==} + cpu: [x64] + os: [win32] + '@rolldown/pluginutils@1.0.0-beta.40': resolution: {integrity: sha512-s3GeJKSQOwBlzdUrj4ISjJj5SfSh+aqn0wjOar4Bx95iV1ETI7F6S/5hLcfAxZ9kXDcyrAkxPlqmd1ZITttf+w==} @@ -3210,6 +3590,9 @@ packages: '@rolldown/pluginutils@1.0.0-beta.50': resolution: {integrity: sha512-5e76wQiQVeL1ICOZVUg4LSOVYg9jyhGCin+icYozhsUzM+fHE7kddi1bdiE0jwVqTfkjba3jUFbEkoC9WkdvyA==} + '@rolldown/pluginutils@1.0.0-beta.53': + resolution: {integrity: sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==} + '@rollup/pluginutils@5.3.0': resolution: {integrity: sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==} engines: {node: '>=14.0.0'} @@ -3795,6 +4178,18 @@ packages: '@tanstack/router-core': optional: true + '@tanstack/react-router-devtools@1.141.2': + resolution: {integrity: sha512-E55O6sYRCHpTMDB+jDaZ8so4G+/Sg5D/bPvomx35hsHrXEc6RaiGHzzWy0bfrc+PVcmhP2sTTBfVakjJfQolAQ==} + engines: {node: '>=12'} + 
peerDependencies: + '@tanstack/react-router': ^1.141.2 + '@tanstack/router-core': ^1.141.2 + react: '>=18.0.0 || >=19.0.0' + react-dom: '>=18.0.0 || >=19.0.0' + peerDependenciesMeta: + '@tanstack/router-core': + optional: true + '@tanstack/react-router-with-query@1.130.17': resolution: {integrity: sha512-TNaSocW20KuPwUojEm130DLWTr9M5hsSzxiu4QqS2jNCnrGLuDrwMHyP+6fq13lG3YuU4u9O1qajxfJIGomZCg==} engines: {node: '>=12'} @@ -3855,6 +4250,29 @@ packages: csstype: optional: true + '@tanstack/router-devtools-core@1.141.2': + resolution: {integrity: sha512-ZvXuq8ASvIzffyl61BwSdAWh//Tp+wBn0GcSIP/LOrp0f/bW8aODPXm1RSGY2/tXrSjntdP7XPID50YXZdyKfg==} + engines: {node: '>=12'} + peerDependencies: + '@tanstack/router-core': ^1.141.2 + csstype: ^3.0.10 + solid-js: '>=1.9.5' + peerDependenciesMeta: + csstype: + optional: true + + '@tanstack/router-devtools@1.141.2': + resolution: {integrity: sha512-UUmY22guSuOuFtvMIAY95eBCNhrpiBbkbc/fpy0PKYIuSfiUyvhSUtPZcrQLE16jmGU1G4NM38d1WoKCJeQmNw==} + engines: {node: '>=12'} + peerDependencies: + '@tanstack/react-router': ^1.141.2 + csstype: ^3.0.10 + react: '>=18.0.0 || >=19.0.0' + react-dom: '>=18.0.0 || >=19.0.0' + peerDependenciesMeta: + csstype: + optional: true + '@tanstack/router-generator@1.140.0': resolution: {integrity: sha512-YYq/DSn7EkBboCySf87RDH3mNq3AfN18v4qHmre73KOdxUJchTZ4LC1+8vbO/1K/Uus2ZFXUDy7QX5KziNx08g==} engines: {node: '>=12'} @@ -4090,6 +4508,9 @@ packages: '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} + '@types/node@22.19.3': + resolution: {integrity: sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==} + '@types/node@24.7.0': resolution: {integrity: sha512-IbKooQVqUBrlzWTi79E8Fw78l8k1RNtlDDNWsFZs7XonuQSJ8oNYfEeclhprUldXISRMLzBpILuKgPlIxm+/Yw==} @@ -4350,6 +4771,11 @@ packages: cpu: [x64] os: [win32] + '@valibot/to-json-schema@1.0.0': + resolution: {integrity: 
sha512-/9crJgPptVsGCL6X+JPDQyaJwkalSZ/52WuF8DiRUxJgcmpNdzYRfZ+gqMEP8W3CTVfuMWPqqvIgfwJ97f9Etw==} + peerDependencies: + valibot: ^1.0.0 + '@vitejs/plugin-basic-ssl@2.1.0': resolution: {integrity: sha512-dOxxrhgyDIEUADhb/8OlV9JIqYLgos03YorAueTIeOUskLJSEsfwCByjbu98ctXitUN3znXKp0bYD/WHSudCeA==} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} @@ -4362,6 +4788,12 @@ packages: peerDependencies: vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 + '@vitejs/plugin-react@5.1.2': + resolution: {integrity: sha512-EcA07pHJouywpzsoTUqNh5NwGayl2PPVEJKUSinGGSxFGYn+shYbqMGBg6FXDqgXum9Ou/ecb+411ssw8HImJQ==} + engines: {node: ^20.19.0 || >=22.12.0} + peerDependencies: + vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 + '@vitejs/plugin-vue@6.0.2': resolution: {integrity: sha512-iHmwV3QcVGGvSC1BG5bZ4z6iwa1SOpAPWmnjOErd4Ske+lZua5K9TtAVdx0gMBClJ28DViCbSmZitjWZsWO3LA==} engines: {node: ^20.19.0 || >=22.12.0} @@ -4554,6 +4986,10 @@ packages: resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} engines: {node: '>=12'} + ansis@3.17.0: + resolution: {integrity: sha512-0qWUglt9JEqLFr3w1I1pbrChn1grhaiAR2ocX1PP/flRmxgtwTzPFFFnfIlD6aMOLQZgSuCRlidD70lvx8yhzg==} + engines: {node: '>=14'} + ansis@4.1.0: resolution: {integrity: sha512-BGcItUBWSMRgOCe+SVZJ+S7yTRG0eGt9cXAHev72yuGcY23hnLA7Bky5L/xLyPINoSN95geovfBkqoTlNZYa7w==} engines: {node: '>=14'} @@ -4633,6 +5069,10 @@ packages: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} + ast-kit@1.4.3: + resolution: {integrity: sha512-MdJqjpodkS5J149zN0Po+HPshkTdUyrvF7CKTafUgv69vBSPtncrj+3IiUgqdd7ElIEkbeXCsEouBUwLrw9Ilg==} + engines: {node: '>=16.14.0'} + ast-types@0.16.1: resolution: {integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} engines: {node: '>=4'} @@ -4941,6 +5381,10 @@ packages: resolution: {integrity: 
sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ==} engines: {node: '>= 0.10.0'} + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + content-disposition@0.5.4: resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} engines: {node: '>= 0.6'} @@ -5153,6 +5597,10 @@ packages: di@0.0.1: resolution: {integrity: sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA==} + diff@7.0.0: + resolution: {integrity: sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==} + engines: {node: '>=0.3.1'} + diff@8.0.2: resolution: {integrity: sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg==} engines: {node: '>=0.3.1'} @@ -5301,6 +5749,10 @@ packages: drizzle-orm: '>=0.36.0' zod: ^3.25.0 || ^4.0.0 + dts-resolver@1.2.0: + resolution: {integrity: sha512-+xNF7raXYI1E3IFB+f3JqvoKYFI8R+1Mh9mpI75yNm3F5XuiC6ErEXe2Lqh9ach+4MQ1tOefzjxulhWGVclYbg==} + engines: {node: '>=20.18.0'} + dunder-proto@1.0.1: resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} engines: {node: '>= 0.4'} @@ -5323,6 +5775,10 @@ packages: emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + empathic@1.1.0: + resolution: {integrity: sha512-rsPft6CK3eHtrlp9Y5ALBb+hfK+DWnA4WFebbazxjWyx8vSm3rZeoM3z9irsjcqO3PYRzlfv27XIB4tz2DV7RA==} + engines: {node: '>=14'} + encodeurl@1.0.2: resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} engines: {node: '>= 0.8'} @@ -5990,6 +6446,9 @@ packages: resolution: {integrity: 
sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==} hasBin: true + hookable@5.5.3: + resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} + hosted-git-info@8.1.0: resolution: {integrity: sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==} engines: {node: ^18.17.0 || >=20.5.0} @@ -7152,7 +7611,14 @@ packages: oxc-resolver@11.13.2: resolution: {integrity: sha512-1SXVyYQ9bqMX3uZo8Px81EG7jhZkO9PvvR5X9roY5TLYVm4ZA7pbPDNlYaDBBeF9U+YO3OeMNoHde52hrcCu8w==} - p-defer@4.0.1: + oxc-resolver@9.0.2: + resolution: {integrity: sha512-w838ygc1p7rF+7+h5vR9A+Y9Fc4imy6C3xPthCMkdFUgFvUWkmABeNB8RBDQ6+afk44Q60/UMMQ+gfDUW99fBA==} + + oxc-transform@0.67.0: + resolution: {integrity: sha512-QXwmpLfNrXZoHgIjEtDEf6lhwmvHouNtstNgg/UveczVIjo8VSzd5h25Ea96PoX9KzReJUY/qYa4QSNkJpZGfA==} + engines: {node: '>=14.0.0'} + + p-defer@4.0.1: resolution: {integrity: sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==} engines: {node: '>=12'} @@ -7468,6 +7934,9 @@ packages: quansync@0.2.11: resolution: {integrity: sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==} + quansync@1.0.0: + resolution: {integrity: sha512-5xZacEEufv3HSTPQuchrvV6soaiACMFnq1H8wkVioctoH3TRha9Sz66lOxRwPK/qZj7HPiSveih9yAyh98gvqA==} + queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -7602,6 +8071,25 @@ packages: deprecated: Rimraf versions prior to v4 are no longer supported hasBin: true + rolldown-plugin-dts@0.9.11: + resolution: {integrity: sha512-iCIRKmvPLwRV4UKSxhaBo+5wDkvc3+MFiqYYvu7sGLSohzxoDn9WEsjN3y7A6xg3aCuxHh6rlRp8xbX98r1rSg==} + engines: {node: '>=20.18.0'} + peerDependencies: + rolldown: ^1.0.0-beta.7 + typescript: ^5.0.0 + peerDependenciesMeta: + typescript: + optional: true + + 
rolldown@1.0.0-beta.8-commit.151352b: + resolution: {integrity: sha512-TCb6GVaFBk4wB0LERofFDxTO5X1/Sgahr7Yn5UA9XjuFtCwL1CyEhUHX5lUIstcMxjbkLjn2z4TAGwisr6Blvw==} + hasBin: true + peerDependencies: + '@oxc-project/runtime': 0.66.0 + peerDependenciesMeta: + '@oxc-project/runtime': + optional: true + rollup-plugin-preserve-directives@0.4.0: resolution: {integrity: sha512-gx4nBxYm5BysmEQS+e2tAMrtFxrGvk+Pe5ppafRibQi0zlW7VYAbEGk6IKDw9sJGPdFWgVTE0o4BU4cdG0Fylg==} peerDependencies: @@ -8170,6 +8658,10 @@ packages: tinyexec@0.3.2: resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + tinyglobby@0.2.14: resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} engines: {node: '>=12.0.0'} @@ -8256,6 +8748,19 @@ packages: typescript: optional: true + tsdown@0.9.9: + resolution: {integrity: sha512-IIGX55rkhaPomNSVrIbA58DRBwTO4ehlDTsw20XSooGqoEZbwpunDc1dRE73wKb1rHdwwBO6NMLOcgV2n1qhpA==} + engines: {node: '>=18.0.0'} + hasBin: true + peerDependencies: + publint: ^0.3.0 + unplugin-unused: ^0.4.0 + peerDependenciesMeta: + publint: + optional: true + unplugin-unused: + optional: true + tslib@1.14.1: resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} @@ -8359,9 +8864,18 @@ packages: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} + unconfig-core@7.4.2: + resolution: {integrity: sha512-VgPCvLWugINbXvMQDf8Jh0mlbvNjNC6eSUziHsBCMpxR05OPrNrvDnyatdMjRgcHaaNsCqz+wjNXxNw1kRLHUg==} + + unconfig@7.4.2: + resolution: {integrity: sha512-nrMlWRQ1xdTjSnSUqvYqJzbTBFugoqHobQj58B2bc8qxHKBBHMNNsWQFP3Cd3/JZK907voM2geYPWqD4VK3MPQ==} + 
uncrypto@0.1.3: resolution: {integrity: sha512-Ql87qFHB3s/De2ClA9e0gsnS6zXG27SkTiSJwjCc9MebbfapQfuPzumMIUMi38ezPZVNFcHI9sUIepeQfw8J8Q==} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici-types@7.14.0: resolution: {integrity: sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==} @@ -8392,6 +8906,10 @@ packages: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} + unplugin-lightningcss@0.3.3: + resolution: {integrity: sha512-mMNRCNIcxc/3410w7sJdXcPxn0IGZdEpq42OBDyckdGkhOeWYZCG9RkHs72TFyBsS82a4agFDOFU8VrFKF2ZvA==} + engines: {node: '>=18.12.0'} + unplugin@2.3.10: resolution: {integrity: sha512-6NCPkv1ClwH+/BGE9QeoTIl09nuiAt0gS28nn1PvYXsGKRwM2TCbFA2QiilmehPDTXIe684k4rZI1yl3A1PCUw==} engines: {node: '>=18.12.0'} @@ -8427,6 +8945,14 @@ packages: resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==} hasBin: true + valibot@1.0.0: + resolution: {integrity: sha512-1Hc0ihzWxBar6NGeZv7fPLY0QuxFMyxwYR2sF1Blu7Wq7EnremwY2W02tit2ij2VJT8HcSkHAQqmFfl77f73Yw==} + peerDependencies: + typescript: '>=5' + peerDependenciesMeta: + typescript: + optional: true + validate-html-nesting@1.2.3: resolution: {integrity: sha512-kdkWdCl6eCeLlRShJKbjVOU2kFKxMF8Ghu50n+crEoyx+VKm3FxAxF9z4DCy6+bbTOqNW0+jcIYRnjoIRzigRw==} @@ -10300,7 +10826,7 @@ snapshots: '@grpc/grpc-js@1.9.15': dependencies: '@grpc/proto-loader': 0.7.15 - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@grpc/proto-loader@0.7.15': dependencies: @@ -10717,6 +11243,8 @@ snapshots: '@tybys/wasm-util': 0.10.1 optional: true + '@neophi/sieve-cache@1.5.0': {} + '@noble/ciphers@2.0.1': {} '@noble/hashes@2.0.1': {} @@ -10814,6 +11342,8 @@ snapshots: '@opentelemetry/api@1.9.0': optional: true + '@oxc-project/types@0.66.0': {} + 
'@oxc-resolver/binding-android-arm-eabi@11.13.2': optional: true @@ -10823,56 +11353,129 @@ snapshots: '@oxc-resolver/binding-darwin-arm64@11.13.2': optional: true + '@oxc-resolver/binding-darwin-arm64@9.0.2': + optional: true + '@oxc-resolver/binding-darwin-x64@11.13.2': optional: true + '@oxc-resolver/binding-darwin-x64@9.0.2': + optional: true + '@oxc-resolver/binding-freebsd-x64@11.13.2': optional: true + '@oxc-resolver/binding-freebsd-x64@9.0.2': + optional: true + '@oxc-resolver/binding-linux-arm-gnueabihf@11.13.2': optional: true + '@oxc-resolver/binding-linux-arm-gnueabihf@9.0.2': + optional: true + '@oxc-resolver/binding-linux-arm-musleabihf@11.13.2': optional: true '@oxc-resolver/binding-linux-arm64-gnu@11.13.2': optional: true + '@oxc-resolver/binding-linux-arm64-gnu@9.0.2': + optional: true + '@oxc-resolver/binding-linux-arm64-musl@11.13.2': optional: true + '@oxc-resolver/binding-linux-arm64-musl@9.0.2': + optional: true + '@oxc-resolver/binding-linux-ppc64-gnu@11.13.2': optional: true '@oxc-resolver/binding-linux-riscv64-gnu@11.13.2': optional: true + '@oxc-resolver/binding-linux-riscv64-gnu@9.0.2': + optional: true + '@oxc-resolver/binding-linux-riscv64-musl@11.13.2': optional: true '@oxc-resolver/binding-linux-s390x-gnu@11.13.2': optional: true + '@oxc-resolver/binding-linux-s390x-gnu@9.0.2': + optional: true + '@oxc-resolver/binding-linux-x64-gnu@11.13.2': optional: true + '@oxc-resolver/binding-linux-x64-gnu@9.0.2': + optional: true + '@oxc-resolver/binding-linux-x64-musl@11.13.2': optional: true + '@oxc-resolver/binding-linux-x64-musl@9.0.2': + optional: true + '@oxc-resolver/binding-wasm32-wasi@11.13.2': dependencies: '@napi-rs/wasm-runtime': 1.0.7 optional: true + '@oxc-resolver/binding-wasm32-wasi@9.0.2': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + '@oxc-resolver/binding-win32-arm64-msvc@11.13.2': optional: true + '@oxc-resolver/binding-win32-arm64-msvc@9.0.2': + optional: true + 
'@oxc-resolver/binding-win32-ia32-msvc@11.13.2': optional: true '@oxc-resolver/binding-win32-x64-msvc@11.13.2': optional: true + '@oxc-resolver/binding-win32-x64-msvc@9.0.2': + optional: true + + '@oxc-transform/binding-darwin-arm64@0.67.0': + optional: true + + '@oxc-transform/binding-darwin-x64@0.67.0': + optional: true + + '@oxc-transform/binding-linux-arm-gnueabihf@0.67.0': + optional: true + + '@oxc-transform/binding-linux-arm64-gnu@0.67.0': + optional: true + + '@oxc-transform/binding-linux-arm64-musl@0.67.0': + optional: true + + '@oxc-transform/binding-linux-x64-gnu@0.67.0': + optional: true + + '@oxc-transform/binding-linux-x64-musl@0.67.0': + optional: true + + '@oxc-transform/binding-wasm32-wasi@0.67.0': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + + '@oxc-transform/binding-win32-arm64-msvc@0.67.0': + optional: true + + '@oxc-transform/binding-win32-x64-msvc@0.67.0': + optional: true + '@parcel/watcher-android-arm64@2.5.1': optional: true @@ -11078,12 +11681,56 @@ snapshots: '@publint/pack@0.1.2': {} + '@quansync/fs@1.0.0': + dependencies: + quansync: 1.0.0 + + '@rolldown/binding-darwin-arm64@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-darwin-x64@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-freebsd-x64@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-linux-arm64-musl@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-linux-x64-gnu@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-linux-x64-musl@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-wasm32-wasi@1.0.0-beta.8-commit.151352b': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + + '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.8-commit.151352b': + 
optional: true + + '@rolldown/binding-win32-ia32-msvc@1.0.0-beta.8-commit.151352b': + optional: true + + '@rolldown/binding-win32-x64-msvc@1.0.0-beta.8-commit.151352b': + optional: true + '@rolldown/pluginutils@1.0.0-beta.40': {} '@rolldown/pluginutils@1.0.0-beta.47': {} '@rolldown/pluginutils@1.0.0-beta.50': {} + '@rolldown/pluginutils@1.0.0-beta.53': {} + '@rollup/pluginutils@5.3.0(rollup@4.52.5)': dependencies: '@types/estree': 1.0.8 @@ -11676,6 +12323,18 @@ snapshots: - csstype - solid-js + '@tanstack/react-router-devtools@1.141.2(@tanstack/react-router@1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1))(@tanstack/router-core@1.140.0)(csstype@3.2.3)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(solid-js@1.9.10)': + dependencies: + '@tanstack/react-router': 1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1) + '@tanstack/router-devtools-core': 1.141.2(@tanstack/router-core@1.140.0)(csstype@3.2.3)(solid-js@1.9.10) + react: 19.2.1 + react-dom: 19.2.1(react@19.2.1) + optionalDependencies: + '@tanstack/router-core': 1.140.0 + transitivePeerDependencies: + - csstype + - solid-js + '@tanstack/react-router-with-query@1.130.17(@tanstack/react-query@5.90.12(react@19.2.1))(@tanstack/react-router@1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1))(@tanstack/router-core@1.140.0)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)': dependencies: '@tanstack/react-query': 5.90.12(react@19.2.1) @@ -11764,6 +12423,30 @@ snapshots: optionalDependencies: csstype: 3.2.3 + '@tanstack/router-devtools-core@1.141.2(@tanstack/router-core@1.140.0)(csstype@3.2.3)(solid-js@1.9.10)': + dependencies: + '@tanstack/router-core': 1.140.0 + clsx: 2.1.1 + goober: 2.1.16(csstype@3.2.3) + solid-js: 1.9.10 + tiny-invariant: 1.3.3 + optionalDependencies: + csstype: 3.2.3 + + '@tanstack/router-devtools@1.141.2(@tanstack/react-router@1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1))(@tanstack/router-core@1.140.0)(csstype@3.2.3)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(solid-js@1.9.10)': + 
dependencies: + '@tanstack/react-router': 1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1) + '@tanstack/react-router-devtools': 1.141.2(@tanstack/react-router@1.140.0(react-dom@19.2.1(react@19.2.1))(react@19.2.1))(@tanstack/router-core@1.140.0)(csstype@3.2.3)(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(solid-js@1.9.10) + clsx: 2.1.1 + goober: 2.1.16(csstype@3.2.3) + react: 19.2.1 + react-dom: 19.2.1(react@19.2.1) + optionalDependencies: + csstype: 3.2.3 + transitivePeerDependencies: + - '@tanstack/router-core' + - solid-js + '@tanstack/router-generator@1.140.0': dependencies: '@tanstack/router-core': 1.140.0 @@ -12068,7 +12751,7 @@ snapshots: '@types/body-parser@1.19.6': dependencies: '@types/connect': 3.4.38 - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/chai@5.2.2': dependencies: @@ -12080,11 +12763,11 @@ snapshots: '@types/connect@3.4.38': dependencies: - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/conventional-commits-parser@5.0.2': dependencies: - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/cors@2.8.19': dependencies: @@ -12100,14 +12783,14 @@ snapshots: '@types/express-serve-static-core@4.19.6': dependencies: - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/qs': 6.14.0 '@types/range-parser': 1.2.7 '@types/send': 0.17.5 '@types/express-serve-static-core@5.0.7': dependencies: - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/qs': 6.14.0 '@types/range-parser': 1.2.7 '@types/send': 0.17.5 @@ -12143,6 +12826,10 @@ snapshots: '@types/node@12.20.55': {} + '@types/node@22.19.3': + dependencies: + undici-types: 6.21.0 + '@types/node@24.7.0': dependencies: undici-types: 7.14.0 @@ -12168,17 +12855,17 @@ snapshots: '@types/send@0.17.5': dependencies: '@types/mime': 1.3.5 - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/serve-static@1.15.8': dependencies: '@types/http-errors': 2.0.5 - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@types/send': 0.17.5 '@types/simple-peer@9.11.9': dependencies: - '@types/node': 
24.7.0 + '@types/node': 22.19.3 '@types/unist@3.0.3': {} @@ -12192,7 +12879,7 @@ snapshots: '@types/ws@8.18.1': dependencies: - '@types/node': 24.7.0 + '@types/node': 22.19.3 '@typescript-eslint/eslint-plugin@8.47.0(@typescript-eslint/parser@8.47.0(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.1(jiti@2.6.1))(typescript@5.9.3)': dependencies: @@ -12440,6 +13127,10 @@ snapshots: '@unrs/resolver-binding-win32-x64-msvc@1.11.1': optional: true + '@valibot/to-json-schema@1.0.0(valibot@1.0.0(typescript@5.9.3))': + dependencies: + valibot: 1.0.0(typescript@5.9.3) + '@vitejs/plugin-basic-ssl@2.1.0(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1))': dependencies: vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) @@ -12456,6 +13147,18 @@ snapshots: transitivePeerDependencies: - supports-color + '@vitejs/plugin-react@5.1.2(vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1))': + dependencies: + '@babel/core': 7.28.5 + '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.5) + '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.28.5) + '@rolldown/pluginutils': 1.0.0-beta.53 + '@types/babel__core': 7.20.5 + react-refresh: 0.18.0 + vite: 7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + transitivePeerDependencies: + - supports-color + '@vitejs/plugin-vue@6.0.2(vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1))(vue@3.5.25(typescript@5.9.3))': dependencies: '@rolldown/pluginutils': 1.0.0-beta.50 @@ -12721,6 +13424,8 @@ snapshots: ansi-styles@6.2.3: {} + ansis@3.17.0: {} + ansis@4.1.0: {} anymatch@3.1.3: @@ -12825,6 +13530,11 @@ snapshots: assertion-error@2.0.1: {} + ast-kit@1.4.3: + dependencies: + '@babel/parser': 7.28.5 
+ pathe: 2.0.3 + ast-types@0.16.1: dependencies: tslib: 2.8.1 @@ -13190,6 +13900,8 @@ snapshots: transitivePeerDependencies: - supports-color + consola@3.4.2: {} + content-disposition@0.5.4: dependencies: safe-buffer: 5.2.1 @@ -13375,6 +14087,8 @@ snapshots: di@0.0.1: {} + diff@7.0.0: {} + diff@8.0.2: {} dir-glob@3.0.1: @@ -13445,6 +14159,11 @@ snapshots: drizzle-orm: 0.45.0(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(gel@2.1.1)(kysely@0.28.8)(pg@8.16.3)(postgres@3.4.7) zod: 4.1.13 + dts-resolver@1.2.0: + dependencies: + oxc-resolver: 9.0.2 + pathe: 2.0.3 + dunder-proto@1.0.1: dependencies: call-bind-apply-helpers: 1.0.2 @@ -13463,6 +14182,8 @@ snapshots: emoji-regex@9.2.2: {} + empathic@1.1.0: {} + encodeurl@1.0.2: {} encodeurl@2.0.0: {} @@ -13486,7 +14207,7 @@ snapshots: engine.io@6.6.4: dependencies: '@types/cors': 2.8.19 - '@types/node': 24.7.0 + '@types/node': 22.19.3 accepts: 1.3.8 base64id: 2.0.0 cookie: 0.7.2 @@ -14444,6 +15165,8 @@ snapshots: he@1.2.0: {} + hookable@5.5.3: {} + hosted-git-info@8.1.0: dependencies: lru-cache: 10.4.3 @@ -15120,7 +15843,6 @@ snapshots: '@lmdb/lmdb-linux-x64': 3.4.2 '@lmdb/lmdb-win32-arm64': 3.4.2 '@lmdb/lmdb-win32-x64': 3.4.2 - optional: true local-pkg@0.5.1: dependencies: @@ -15417,7 +16139,6 @@ snapshots: msgpackr@1.11.5: optionalDependencies: msgpackr-extract: 3.0.3 - optional: true muggle-string@0.4.1: {} @@ -15450,8 +16171,7 @@ snapshots: lower-case: 2.0.2 tslib: 2.8.1 - node-addon-api@6.1.0: - optional: true + node-addon-api@6.1.0: {} node-addon-api@7.1.1: optional: true @@ -15465,7 +16185,6 @@ snapshots: node-gyp-build-optional-packages@5.2.2: dependencies: detect-libc: 2.0.4 - optional: true node-gyp@11.4.2: dependencies: @@ -15631,8 +16350,7 @@ snapshots: string-width: 7.2.0 strip-ansi: 7.1.2 - ordered-binary@1.6.0: - optional: true + ordered-binary@1.6.0: {} outdent@0.5.0: {} @@ -15664,6 +16382,35 @@ snapshots: '@oxc-resolver/binding-win32-ia32-msvc': 11.13.2 '@oxc-resolver/binding-win32-x64-msvc': 11.13.2 + 
oxc-resolver@9.0.2: + optionalDependencies: + '@oxc-resolver/binding-darwin-arm64': 9.0.2 + '@oxc-resolver/binding-darwin-x64': 9.0.2 + '@oxc-resolver/binding-freebsd-x64': 9.0.2 + '@oxc-resolver/binding-linux-arm-gnueabihf': 9.0.2 + '@oxc-resolver/binding-linux-arm64-gnu': 9.0.2 + '@oxc-resolver/binding-linux-arm64-musl': 9.0.2 + '@oxc-resolver/binding-linux-riscv64-gnu': 9.0.2 + '@oxc-resolver/binding-linux-s390x-gnu': 9.0.2 + '@oxc-resolver/binding-linux-x64-gnu': 9.0.2 + '@oxc-resolver/binding-linux-x64-musl': 9.0.2 + '@oxc-resolver/binding-wasm32-wasi': 9.0.2 + '@oxc-resolver/binding-win32-arm64-msvc': 9.0.2 + '@oxc-resolver/binding-win32-x64-msvc': 9.0.2 + + oxc-transform@0.67.0: + optionalDependencies: + '@oxc-transform/binding-darwin-arm64': 0.67.0 + '@oxc-transform/binding-darwin-x64': 0.67.0 + '@oxc-transform/binding-linux-arm-gnueabihf': 0.67.0 + '@oxc-transform/binding-linux-arm64-gnu': 0.67.0 + '@oxc-transform/binding-linux-arm64-musl': 0.67.0 + '@oxc-transform/binding-linux-x64-gnu': 0.67.0 + '@oxc-transform/binding-linux-x64-musl': 0.67.0 + '@oxc-transform/binding-wasm32-wasi': 0.67.0 + '@oxc-transform/binding-win32-arm64-msvc': 0.67.0 + '@oxc-transform/binding-win32-x64-msvc': 0.67.0 + p-defer@4.0.1: {} p-filter@2.1.0: @@ -15920,7 +16667,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 24.7.0 + '@types/node': 22.19.3 long: 5.3.2 proxy-addr@2.0.7: @@ -15964,6 +16711,8 @@ snapshots: quansync@0.2.11: {} + quansync@1.0.0: {} + queue-microtask@1.2.3: {} randombytes@2.1.0: @@ -16100,6 +16849,44 @@ snapshots: dependencies: glob: 7.2.3 + rolldown-plugin-dts@0.9.11(rolldown@1.0.0-beta.8-commit.151352b(typescript@5.9.3))(typescript@5.9.3): + dependencies: + '@babel/generator': 7.28.5 + '@babel/parser': 7.28.5 + '@babel/types': 7.28.5 + ast-kit: 1.4.3 + debug: 4.4.3 + dts-resolver: 1.2.0 + get-tsconfig: 4.10.1 + oxc-transform: 0.67.0 + rolldown: 1.0.0-beta.8-commit.151352b(typescript@5.9.3) + 
optionalDependencies: + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + rolldown@1.0.0-beta.8-commit.151352b(typescript@5.9.3): + dependencies: + '@oxc-project/types': 0.66.0 + '@valibot/to-json-schema': 1.0.0(valibot@1.0.0(typescript@5.9.3)) + ansis: 3.17.0 + valibot: 1.0.0(typescript@5.9.3) + optionalDependencies: + '@rolldown/binding-darwin-arm64': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-darwin-x64': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-freebsd-x64': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-linux-arm64-gnu': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-linux-arm64-musl': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-linux-x64-gnu': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-linux-x64-musl': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-wasm32-wasi': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-win32-arm64-msvc': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-win32-ia32-msvc': 1.0.0-beta.8-commit.151352b + '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.8-commit.151352b + transitivePeerDependencies: + - typescript + rollup-plugin-preserve-directives@0.4.0(rollup@4.52.5): dependencies: '@rollup/pluginutils': 5.3.0(rollup@4.52.5) @@ -16866,6 +17653,8 @@ snapshots: tinyexec@0.3.2: {} + tinyexec@1.0.2: {} + tinyglobby@0.2.14: dependencies: fdir: 6.5.0(picomatch@4.0.3) @@ -16933,6 +17722,30 @@ snapshots: optionalDependencies: typescript: 5.9.3 + tsdown@0.9.9(publint@0.3.15)(typescript@5.9.3): + dependencies: + ansis: 3.17.0 + cac: 6.7.14 + chokidar: 4.0.3 + consola: 3.4.2 + debug: 4.4.3 + diff: 7.0.0 + empathic: 1.1.0 + hookable: 5.5.3 + lightningcss: 1.30.2 + rolldown: 1.0.0-beta.8-commit.151352b(typescript@5.9.3) + rolldown-plugin-dts: 0.9.11(rolldown@1.0.0-beta.8-commit.151352b(typescript@5.9.3))(typescript@5.9.3) + tinyexec: 1.0.2 + tinyglobby: 0.2.15 + unconfig: 7.4.2 + unplugin-lightningcss: 0.3.3 + optionalDependencies: 
+ publint: 0.3.15 + transitivePeerDependencies: + - '@oxc-project/runtime' + - supports-color + - typescript + tslib@1.14.1: {} tslib@2.8.1: {} @@ -17054,8 +17867,23 @@ snapshots: has-symbols: 1.1.0 which-boxed-primitive: 1.1.1 + unconfig-core@7.4.2: + dependencies: + '@quansync/fs': 1.0.0 + quansync: 1.0.0 + + unconfig@7.4.2: + dependencies: + '@quansync/fs': 1.0.0 + defu: 6.1.4 + jiti: 2.6.1 + quansync: 1.0.0 + unconfig-core: 7.4.2 + uncrypto@0.1.3: {} + undici-types@6.21.0: {} + undici-types@7.14.0: {} undici@7.16.0: {} @@ -17076,6 +17904,12 @@ snapshots: unpipe@1.0.0: {} + unplugin-lightningcss@0.3.3: + dependencies: + lightningcss: 1.30.2 + magic-string: 0.30.21 + unplugin: 2.3.10 + unplugin@2.3.10: dependencies: '@jridgewell/remapping': 2.3.5 @@ -17135,6 +17969,10 @@ snapshots: uuid@13.0.0: {} + valibot@1.0.0(typescript@5.9.3): + optionalDependencies: + typescript: 5.9.3 + validate-html-nesting@1.2.3: {} validate-npm-package-license@3.0.4: @@ -17148,6 +17986,27 @@ snapshots: vary@1.1.2: {} + vite-node@3.2.4(@types/node@22.19.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1): + dependencies: + cac: 6.7.14 + debug: 4.4.3 + es-module-lexer: 1.7.0 + pathe: 2.0.3 + vite: 7.2.6(@types/node@22.19.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + transitivePeerDependencies: + - '@types/node' + - jiti + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + vite-node@3.2.4(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1): dependencies: cac: 6.7.14 @@ -17236,6 +18095,24 @@ snapshots: tsx: 4.21.0 yaml: 2.8.1 + vite@7.2.6(@types/node@22.19.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1): + dependencies: + esbuild: 0.25.11 + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + postcss: 8.5.6 + rollup: 4.52.5 + tinyglobby: 0.2.15 + 
optionalDependencies: + '@types/node': 22.19.3 + fsevents: 2.3.3 + jiti: 2.6.1 + lightningcss: 1.30.2 + sass: 1.90.0 + terser: 5.44.0 + tsx: 4.21.0 + yaml: 2.8.1 + vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1): dependencies: esbuild: 0.25.11 @@ -17258,6 +18135,50 @@ snapshots: optionalDependencies: vite: 7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.19.3)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.2.0(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1): + dependencies: + '@types/chai': 5.2.2 + '@vitest/expect': 3.2.4 + '@vitest/mocker': 3.2.4(vite@7.2.6(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1)) + '@vitest/pretty-format': 3.2.4 + '@vitest/runner': 3.2.4 + '@vitest/snapshot': 3.2.4 + '@vitest/spy': 3.2.4 + '@vitest/utils': 3.2.4 + chai: 5.3.3 + debug: 4.4.3 + expect-type: 1.2.2 + magic-string: 0.30.21 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 3.9.0 + tinybench: 2.9.0 + tinyexec: 0.3.2 + tinyglobby: 0.2.15 + tinypool: 1.1.1 + tinyrainbow: 2.0.0 + vite: 7.2.6(@types/node@22.19.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + vite-node: 3.2.4(@types/node@22.19.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/debug': 4.1.12 + '@types/node': 22.19.3 + '@vitest/ui': 3.2.4(vitest@3.2.4) + jsdom: 27.2.0(postcss@8.5.6) + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + 
vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(@vitest/ui@3.2.4)(jiti@2.6.1)(jsdom@27.2.0(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.21.0)(yaml@2.8.1): dependencies: '@types/chai': 5.2.2 @@ -17339,8 +18260,7 @@ snapshots: glob-to-regexp: 0.4.1 graceful-fs: 4.2.11 - weak-lru-cache@1.2.2: - optional: true + weak-lru-cache@1.2.2: {} web-vitals@4.2.4: {} diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 18dafa94f..b7a900cda 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -5,3 +5,4 @@ preferWorkspacePackages: true packages: - packages/** - examples/**/* + - ../../durable-streams/durable-streams/packages/* From c0e0807dd1fb11be43df7a279063a1738343031f Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Sun, 14 Dec 2025 21:19:17 +0000 Subject: [PATCH 5/5] ci: apply automated fixes --- packages/db/src/query/builder/functions.ts | 4 +-- .../durable-stream-db-collection/README.md | 36 ++++++++++--------- .../src/collection.ts | 2 +- .../tests/collection.test.ts | 8 +++-- .../tsconfig.json | 4 ++- 5 files changed, 31 insertions(+), 23 deletions(-) diff --git a/packages/db/src/query/builder/functions.ts b/packages/db/src/query/builder/functions.ts index 11593b6b5..69af3af2b 100644 --- a/packages/db/src/query/builder/functions.ts +++ b/packages/db/src/query/builder/functions.ts @@ -330,7 +330,7 @@ export function max(arg: T): AggregateReturnType { * for proper lexicographic comparison (e.g., ISO 8601 date strings). */ export function minStr( - arg: T + arg: T, ): Aggregate { return new Aggregate(`minstr`, [toExpression(arg)]) } @@ -341,7 +341,7 @@ export function minStr( * for proper lexicographic comparison (e.g., ISO 8601 date strings). 
*/ export function maxStr( - arg: T + arg: T, ): Aggregate { return new Aggregate(`maxstr`, [toExpression(arg)]) } diff --git a/packages/durable-stream-db-collection/README.md b/packages/durable-stream-db-collection/README.md index f87c95d8e..3514d6f53 100644 --- a/packages/durable-stream-db-collection/README.md +++ b/packages/durable-stream-db-collection/README.md @@ -21,7 +21,7 @@ const eventsCollection = createCollection( url: 'https://api.example.com/v1/stream/events', getKey: (row) => row.id, getDeduplicationKey: (row) => `${row.id}:${row.seq}`, - }) + }), ) ``` @@ -56,19 +56,19 @@ Creates TanStack DB collection configuration for a Durable Stream. ```typescript interface DurableStreamCollectionConfig { // Required - url: string // URL of the Durable Stream endpoint - getKey: (row: TRow) => string | number // Extract primary key from row + url: string // URL of the Durable Stream endpoint + getKey: (row: TRow) => string | number // Extract primary key from row getDeduplicationKey: (row: TRow) => string // Extract deduplication key from row // Optional - id?: string // Collection ID (auto-generated from URL if not provided) - schema?: StandardSchemaV1 // Standard Schema for validation - initialOffset?: string // Initial offset (default: '-1' for beginning) - headers?: Record // HTTP headers for requests - reconnectDelay?: number // Delay before reconnecting after error (default: 5000ms) - liveMode?: 'long-poll' | 'sse' // Live mode (default: 'long-poll') - storageKey?: string | false // Storage key prefix (default: 'durable-stream') - storage?: OffsetStorage // Custom storage adapter + id?: string // Collection ID (auto-generated from URL if not provided) + schema?: StandardSchemaV1 // Standard Schema for validation + initialOffset?: string // Initial offset (default: '-1' for beginning) + headers?: Record // HTTP headers for requests + reconnectDelay?: number // Delay before reconnecting after error (default: 5000ms) + liveMode?: 'long-poll' | 'sse' // Live 
mode (default: 'long-poll') + storageKey?: string | false // Storage key prefix (default: 'durable-stream') + storage?: OffsetStorage // Custom storage adapter } ``` @@ -100,7 +100,7 @@ const eventsCollection = createCollection( url: 'https://api.example.com/v1/stream/events', getKey: (row) => row.id, getDeduplicationKey: (row) => row.id, - }) + }), ) // Preload the collection @@ -134,7 +134,7 @@ const eventsCollection = createCollection( getKey: (row) => row.id, getDeduplicationKey: (row) => `${row.id}:${row.seq}`, schema: eventSchema, - }) + }), ) ``` @@ -147,9 +147,9 @@ const eventsCollection = createCollection( getKey: (row) => row.id, getDeduplicationKey: (row) => row.id, headers: { - 'Authorization': `Bearer ${token}`, + Authorization: `Bearer ${token}`, }, - }) + }), ) ``` @@ -165,7 +165,7 @@ const eventsCollection = createCollection( getKey: (row) => row.id, getDeduplicationKey: (row) => row.id, storage: AsyncStorage, - }) + }), ) ``` @@ -178,7 +178,7 @@ const eventsCollection = createCollection( getKey: (row) => row.id, getDeduplicationKey: (row) => row.id, storageKey: false, // No persistence - }) + }), ) ``` @@ -225,12 +225,14 @@ getDeduplicationKey: (row) => `${row.timestamp}:${row.id}` ``` The deduplication key must be: + - **Unique** within the stream - **Deterministic** - the same row always produces the same key ## Reconnection Behavior On error, the collection will: + 1. Mark as ready (if not already) to avoid blocking UI 2. Wait for `reconnectDelay` milliseconds (default: 5000) 3. 
Reconnect and resume from the last successful offset diff --git a/packages/durable-stream-db-collection/src/collection.ts b/packages/durable-stream-db-collection/src/collection.ts index 9ad793798..8199a9bbb 100644 --- a/packages/durable-stream-db-collection/src/collection.ts +++ b/packages/durable-stream-db-collection/src/collection.ts @@ -199,7 +199,7 @@ export function durableStreamCollectionOptions( // Create the getKey function that extracts from RowWithOffset const getKey = (row: RowWithOffset): string | number => { // Extract the original row (without offset) for the user's getKey function - + const { offset: _offset, ...originalRow } = row return config.getKey(originalRow as TRow) } diff --git a/packages/durable-stream-db-collection/tests/collection.test.ts b/packages/durable-stream-db-collection/tests/collection.test.ts index 7ccfcd4a9..2d8f61a78 100644 --- a/packages/durable-stream-db-collection/tests/collection.test.ts +++ b/packages/durable-stream-db-collection/tests/collection.test.ts @@ -30,9 +30,13 @@ vi.mock(`@durable-streams/client`, () => { }) // Helper to create an async iterator from a controller -function createMockFollowIterator(): AsyncIterable> { +function createMockFollowIterator(): AsyncIterable< + DurableStreamResult +> { const queue: Array> = [] - let resolveNext: ((value: IteratorResult>) => void) | null = null + let resolveNext: + | ((value: IteratorResult>) => void) + | null = null let isDone = false let error: Error | null = null diff --git a/packages/durable-stream-db-collection/tsconfig.json b/packages/durable-stream-db-collection/tsconfig.json index eb61958ff..0b6ae9e89 100644 --- a/packages/durable-stream-db-collection/tsconfig.json +++ b/packages/durable-stream-db-collection/tsconfig.json @@ -13,7 +13,9 @@ "paths": { "@tanstack/db-ivm": ["../db-ivm/src"], "@tanstack/db": ["../db/src"], - "@durable-streams/client": ["../../../../durable-streams/durable-streams/packages/client/src"] + "@durable-streams/client": [ + 
"../../../../durable-streams/durable-streams/packages/client/src" + ] } }, "include": ["src", "tests", "vite.config.ts"],