From ab2d82c757041b09ce40030f24964a3f390a16f6 Mon Sep 17 00:00:00 2001 From: Nik Date: Wed, 24 Dec 2025 10:41:54 +0100 Subject: [PATCH 1/2] feat(libsql): add Edge/Cloudflare Workers support Add edge runtime compatibility for @voltagent/libsql package, enabling use with Cloudflare Workers, Vercel Edge Functions, and other edge runtimes. Changes: - Refactor adapters into core classes with dependency injection - Add edge-specific adapters using @libsql/client/web - Add new ./edge export path for edge environments - Use DataView/ArrayBuffer in core for cross-platform compatibility - Node.js adapters override with Buffer for better performance - Update Cloudflare Workers docs with Turso/LibSQL example Usage: ```typescript import { LibSQLMemoryAdapter } from "@voltagent/libsql/edge"; const adapter = new LibSQLMemoryAdapter({ url: "libsql://your-db.turso.io", authToken: "your-token", }); ``` --- .changeset/warm-rooms-cheer.md | 22 + packages/libsql/package.json | 10 + packages/libsql/src/edge.ts | 45 + packages/libsql/src/memory-core.ts | 1225 +++++++++++++++ packages/libsql/src/memory-v2-adapter-edge.ts | 82 + packages/libsql/src/memory-v2-adapter.ts | 1391 +---------------- .../libsql/src/observability-adapter-edge.ts | 74 + packages/libsql/src/observability-adapter.ts | 999 +----------- packages/libsql/src/observability-core.ts | 880 +++++++++++ packages/libsql/src/vector-adapter-edge.ts | 77 + packages/libsql/src/vector-adapter.ts | 606 +------ packages/libsql/src/vector-core.ts | 532 +++++++ packages/libsql/tsup.config.ts | 2 +- website/deployment-docs/cloudflare-workers.md | 25 +- 14 files changed, 3040 insertions(+), 2930 deletions(-) create mode 100644 .changeset/warm-rooms-cheer.md create mode 100644 packages/libsql/src/edge.ts create mode 100644 packages/libsql/src/memory-core.ts create mode 100644 packages/libsql/src/memory-v2-adapter-edge.ts create mode 100644 packages/libsql/src/observability-adapter-edge.ts create mode 100644 packages/libsql/src/observability-core.ts create mode 100644 packages/libsql/src/vector-adapter-edge.ts create mode 100644 packages/libsql/src/vector-core.ts diff --git a/.changeset/warm-rooms-cheer.md b/.changeset/warm-rooms-cheer.md new file mode 100644 index 000000000..8b0a25f6e --- /dev/null +++ b/.changeset/warm-rooms-cheer.md @@ -0,0 +1,22 @@ +--- +"@voltagent/libsql": minor +--- + +Add Edge/Cloudflare Workers support for @voltagent/libsql + +- New `@voltagent/libsql/edge` export for edge runtimes +- Refactored adapters into core classes with dependency injection +- Edge adapters use `@libsql/client/web` for fetch-based transport +- Core uses DataView/ArrayBuffer for cross-platform compatibility +- Node.js adapters override with Buffer for better performance + +Usage: + +```typescript +import { LibSQLMemoryAdapter } from "@voltagent/libsql/edge"; + +const adapter = new LibSQLMemoryAdapter({ + url: "libsql://your-db.turso.io", + authToken: "your-token", +}); +``` diff --git a/packages/libsql/package.json b/packages/libsql/package.json index a7020d479..f28924725 100644 --- a/packages/libsql/package.json +++ b/packages/libsql/package.json @@ -22,6 +22,16 @@ "types": "./dist/index.d.ts", "default": "./dist/index.js" } + }, + "./edge": { + "import": { + "types": "./dist/edge.d.mts", + "default": "./dist/edge.mjs" + }, + "require": { + "types": "./dist/edge.d.ts", + "default": "./dist/edge.js" + } } }, "files": [ diff --git a/packages/libsql/src/edge.ts b/packages/libsql/src/edge.ts new file mode 100644 index 000000000..3a24980c5 --- /dev/null +++ 
b/packages/libsql/src/edge.ts @@ -0,0 +1,45 @@ +/** + * Edge/Cloudflare Workers entrypoint for @voltagent/libsql + * + * This module provides edge-compatible adapters that work with Cloudflare Workers, + * Vercel Edge Functions, and other edge runtimes. + * + * Key differences from the main entrypoint: + * - Uses @libsql/client/web instead of @libsql/client + * - Only supports remote Turso URLs (libsql://) + * - No file system operations (no local SQLite support) + * + * Usage: + * ```typescript + * import { LibSQLMemoryAdapter } from "@voltagent/libsql/edge"; + * + * const adapter = new LibSQLMemoryAdapter({ + * url: "libsql://your-db.turso.io", + * authToken: "your-token", + * }); + * ``` + */ + +// Export Edge Memory adapter (with alias for drop-in compatibility) +export { LibSQLMemoryAdapterEdge as LibSQLMemoryAdapter } from "./memory-v2-adapter-edge"; +export { LibSQLMemoryAdapterEdge } from "./memory-v2-adapter-edge"; +export type { + LibSQLMemoryEdgeOptions as LibSQLMemoryOptions, + LibSQLMemoryEdgeOptions, +} from "./memory-v2-adapter-edge"; + +// Export Edge Observability adapter (with alias for drop-in compatibility) +export { LibSQLObservabilityAdapterEdge as LibSQLObservabilityAdapter } from "./observability-adapter-edge"; +export { LibSQLObservabilityAdapterEdge } from "./observability-adapter-edge"; +export type { + LibSQLObservabilityEdgeOptions as LibSQLObservabilityOptions, + LibSQLObservabilityEdgeOptions, +} from "./observability-adapter-edge"; + +// Export Edge Vector adapter (with alias for drop-in compatibility) +export { LibSQLVectorAdapterEdge as LibSQLVectorAdapter } from "./vector-adapter-edge"; +export { LibSQLVectorAdapterEdge } from "./vector-adapter-edge"; +export type { + LibSQLVectorEdgeOptions as LibSQLVectorOptions, + LibSQLVectorEdgeOptions, +} from "./vector-adapter-edge"; diff --git a/packages/libsql/src/memory-core.ts b/packages/libsql/src/memory-core.ts new file mode 100644 index 000000000..ef4c6e412 --- /dev/null +++ b/packages/libsql/src/memory-core.ts @@ -0,0 +1,1225 @@ +/** + * LibSQL Memory Adapter Core + * Contains shared logic for both Node.js and Edge environments + * Environment-specific adapters extend this class + */ + +import type { Client } from "@libsql/client"; +import { ConversationAlreadyExistsError, ConversationNotFoundError } from "@voltagent/core"; +import type { + Conversation, + ConversationQueryOptions, + ConversationStepRecord, + CreateConversationInput, + GetConversationStepsOptions, + GetMessagesOptions, + StorageAdapter, + WorkflowStateEntry, + WorkingMemoryScope, +} from "@voltagent/core"; +import { safeStringify } from "@voltagent/internal"; +import type { Logger } from "@voltagent/logger"; +import type { UIMessage } from "ai"; + +/** + * Core configuration options for LibSQL Memory adapter + */ +export interface LibSQLMemoryCoreOptions { + /** + * Prefix for table names + * @default "voltagent_memory" + */ + tablePrefix?: string; + + /** + * Maximum number of retries for database operations + * @default 3 + */ + maxRetries?: number; + + /** + * Initial retry delay in milliseconds + * @default 100 + */ + retryDelayMs?: number; +} + +/** + * LibSQL Memory Adapter Core + * Implements all storage operations, receives client via dependency injection + */ +export class LibSQLMemoryCore implements StorageAdapter { + protected client: Client; + protected tablePrefix: string; + protected initialized = false; + protected logger: Logger; + protected maxRetries: number; + protected retryDelayMs: number; + protected url: string; + + 
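/**
 * The core never constructs a libsql client itself; the Node and Edge wrappers
 * build one (from "@libsql/client" or "@libsql/client/web") and inject it here.
 * A minimal sketch of wiring the core directly, assuming only the signatures in
 * this patch (the URL, token, and logger name are illustrative):
 *
 * @example
 * import { createClient } from "@libsql/client/web";
 * import { createPinoLogger } from "@voltagent/logger";
 * import { LibSQLMemoryCore } from "./memory-core";
 *
 * const client = createClient({ url: "libsql://your-db.turso.io", authToken: "your-token" });
 * const core = new LibSQLMemoryCore(
 *   client,
 *   "libsql://your-db.turso.io",
 *   { tablePrefix: "voltagent_memory" },
 *   createPinoLogger({ name: "libsql-memory-demo" }),
 * );
 */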
constructor(client: Client, url: string, options: LibSQLMemoryCoreOptions, logger: Logger) { + this.client = client; + this.url = url; + this.tablePrefix = options.tablePrefix ?? "voltagent_memory"; + this.maxRetries = options.maxRetries ?? 3; + this.retryDelayMs = options.retryDelayMs ?? 100; + this.logger = logger; + + this.logger.debug("LibSQL Memory adapter core initialized", { url: this.url }); + } + + /** + * Execute a database operation with retry logic + */ + protected async executeWithRetry<T>( + operation: () => Promise<T>, + operationName: string, + ): Promise<T> { + let lastError: Error | undefined; + + for (let attempt = 0; attempt < this.maxRetries; attempt++) { + try { + return await operation(); + } catch (error: any) { + lastError = error; + + if ( + error?.code === "SQLITE_BUSY" || + error?.message?.includes("SQLITE_BUSY") || + error?.message?.includes("database is locked") + ) { + const delay = this.retryDelayMs * 2 ** attempt; + this.logger.debug( + `Database busy, retrying ${operationName} (attempt ${attempt + 1}/${this.maxRetries}) after ${delay}ms`, + ); + await new Promise((resolve) => setTimeout(resolve, delay)); + } else { + throw error; + } + } + } + + this.logger.error( + `Failed to execute ${operationName} after ${this.maxRetries} attempts`, + lastError, + ); + throw lastError; + } + + /** + * Initialize database schema + */ + protected async initialize(): Promise<void> { + if (this.initialized) return; + + const conversationsTable = `${this.tablePrefix}_conversations`; + const messagesTable = `${this.tablePrefix}_messages`; + const usersTable = `${this.tablePrefix}_users`; + const workflowStatesTable = `${this.tablePrefix}_workflow_states`; + const stepsTable = `${this.tablePrefix}_steps`; + + const isMemoryDb = this.url === ":memory:" || this.url.includes("mode=memory"); + + if (!isMemoryDb && (this.url.startsWith("file:") || this.url.startsWith("libsql:"))) { + try { + await this.client.execute("PRAGMA journal_mode=WAL"); + this.logger.debug("Set PRAGMA journal_mode=WAL"); + } catch (err) { + this.logger.debug("Failed to set PRAGMA journal_mode=WAL (non-critical)", { err }); + } + } + + try { + await this.client.execute("PRAGMA busy_timeout=5000"); + this.logger.debug("Set PRAGMA busy_timeout=5000"); + } catch (err) { + this.logger.debug("Failed to set PRAGMA busy_timeout (non-critical)", { err }); + } + + try { + await this.client.execute("PRAGMA foreign_keys=ON"); + this.logger.debug("Set PRAGMA foreign_keys=ON"); + } catch (err) { + this.logger.debug("Failed to set PRAGMA foreign_keys (non-critical)", { err }); + } + + this.logger.debug("Applied PRAGMA settings for better concurrency"); + + await this.executeWithRetry(async () => { + await this.client.batch([ + `CREATE TABLE IF NOT EXISTS ${usersTable} ( + id TEXT PRIMARY KEY, + metadata TEXT, + created_at TEXT DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT DEFAULT CURRENT_TIMESTAMP + )`, + + `CREATE TABLE IF NOT EXISTS ${conversationsTable} ( + id TEXT PRIMARY KEY, + resource_id TEXT NOT NULL, + user_id TEXT NOT NULL, + title TEXT NOT NULL, + metadata TEXT NOT NULL, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + )`, + + `CREATE TABLE IF NOT EXISTS ${messagesTable} ( + conversation_id TEXT NOT NULL, + message_id TEXT NOT NULL, + user_id TEXT NOT NULL, + role TEXT NOT NULL, + parts TEXT NOT NULL, + metadata TEXT, + format_version INTEGER DEFAULT 2, + created_at TEXT NOT NULL, + PRIMARY KEY (conversation_id, message_id) + )`, + + `CREATE TABLE IF NOT EXISTS ${workflowStatesTable} ( + id TEXT PRIMARY KEY, + 
workflow_id TEXT NOT NULL, + workflow_name TEXT NOT NULL, + status TEXT NOT NULL, + suspension TEXT, + events TEXT, + output TEXT, + cancellation TEXT, + user_id TEXT, + conversation_id TEXT, + metadata TEXT, + created_at TEXT NOT NULL, + updated_at TEXT NOT NULL + )`, + + `CREATE TABLE IF NOT EXISTS ${stepsTable} ( + id TEXT PRIMARY KEY, + conversation_id TEXT NOT NULL, + user_id TEXT NOT NULL, + agent_id TEXT NOT NULL, + agent_name TEXT, + operation_id TEXT, + step_index INTEGER NOT NULL, + type TEXT NOT NULL, + role TEXT NOT NULL, + content TEXT, + arguments TEXT, + result TEXT, + usage TEXT, + sub_agent_id TEXT, + sub_agent_name TEXT, + created_at TEXT NOT NULL, + FOREIGN KEY (conversation_id) REFERENCES ${conversationsTable}(id) ON DELETE CASCADE + )`, + + `CREATE INDEX IF NOT EXISTS idx_${conversationsTable}_user_id ON ${conversationsTable}(user_id)`, + `CREATE INDEX IF NOT EXISTS idx_${conversationsTable}_resource_id ON ${conversationsTable}(resource_id)`, + `CREATE INDEX IF NOT EXISTS idx_${messagesTable}_conversation_id ON ${messagesTable}(conversation_id)`, + `CREATE INDEX IF NOT EXISTS idx_${messagesTable}_created_at ON ${messagesTable}(created_at)`, + `CREATE INDEX IF NOT EXISTS idx_${workflowStatesTable}_workflow_id ON ${workflowStatesTable}(workflow_id)`, + `CREATE INDEX IF NOT EXISTS idx_${workflowStatesTable}_status ON ${workflowStatesTable}(status)`, + `CREATE INDEX IF NOT EXISTS idx_${stepsTable}_conversation ON ${stepsTable}(conversation_id, step_index)`, + `CREATE INDEX IF NOT EXISTS idx_${stepsTable}_operation ON ${stepsTable}(conversation_id, operation_id)`, + ]); + }, "initialize database schema"); + + await this.addV2ColumnsToMessagesTable(); + await this.migrateDefaultUserIds(); + await this.addWorkflowStateColumns(); + + this.initialized = true; + this.logger.debug("Database schema initialized"); + } + + private async addV2ColumnsToMessagesTable(): Promise { + const messagesTableName = `${this.tablePrefix}_messages`; + + try { + const tableInfo = await this.client.execute(`PRAGMA table_info(${messagesTableName})`); + const columns = tableInfo.rows.map((row) => row.name as string); + + if (!columns.includes("parts")) { + try { + await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN parts TEXT`); + } catch (_e) { + // Column might already exist + } + } + + if (!columns.includes("metadata")) { + try { + await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN metadata TEXT`); + } catch (_e) { + // Column might already exist + } + } + + if (!columns.includes("format_version")) { + try { + await this.client.execute( + `ALTER TABLE ${messagesTableName} ADD COLUMN format_version INTEGER DEFAULT 2`, + ); + } catch (_e) { + // Column might already exist + } + } + + if (!columns.includes("user_id")) { + try { + await this.client.execute( + `ALTER TABLE ${messagesTableName} ADD COLUMN user_id TEXT NOT NULL DEFAULT 'default'`, + ); + } catch (_e) { + // Column might already exist + } + } + + const contentInfo = tableInfo.rows.find((row) => row.name === "content"); + if (contentInfo && contentInfo.notnull === 1) { + try { + await this.client.execute( + `ALTER TABLE ${messagesTableName} ADD COLUMN content_temp TEXT`, + ); + await this.client.execute( + `UPDATE ${messagesTableName} SET content_temp = content WHERE content IS NOT NULL`, + ); + try { + await this.client.execute(`ALTER TABLE ${messagesTableName} DROP COLUMN content`); + await this.client.execute( + `ALTER TABLE ${messagesTableName} RENAME COLUMN content_temp TO content`, + ); + } 
catch (_) { + // If DROP not supported, keep both columns + } + } catch (_) { + // Content migration error - not critical + } + } + + const typeInfo = tableInfo.rows.find((row) => row.name === "type"); + if (typeInfo && typeInfo.notnull === 1) { + try { + await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN type_temp TEXT`); + await this.client.execute( + `UPDATE ${messagesTableName} SET type_temp = type WHERE type IS NOT NULL`, + ); + try { + await this.client.execute(`ALTER TABLE ${messagesTableName} DROP COLUMN type`); + await this.client.execute( + `ALTER TABLE ${messagesTableName} RENAME COLUMN type_temp TO type`, + ); + } catch (_) { + // If DROP not supported, keep both columns + } + } catch (_) { + // Type migration error - not critical + } + } + } catch (_) { + // Don't throw - this is not critical for new installations + } + } + + private async migrateDefaultUserIds(): Promise { + const messagesTableName = `${this.tablePrefix}_messages`; + const conversationsTableName = `${this.tablePrefix}_conversations`; + + try { + const checkResult = await this.client.execute({ + sql: `SELECT COUNT(*) as count FROM ${messagesTableName} WHERE user_id = 'default'`, + args: [], + }); + + const defaultCount = (checkResult.rows[0]?.count as number) || 0; + + if (defaultCount === 0) { + return; + } + + this.logger.debug(`Found ${defaultCount} messages with default user_id, starting migration`); + + await this.executeWithRetry(async () => { + const result = await this.client.execute({ + sql: `UPDATE ${messagesTableName} + SET user_id = ( + SELECT c.user_id + FROM ${conversationsTableName} c + WHERE c.id = ${messagesTableName}.conversation_id + ) + WHERE user_id = 'default' + AND EXISTS ( + SELECT 1 + FROM ${conversationsTableName} c + WHERE c.id = ${messagesTableName}.conversation_id + )`, + args: [], + }); + + const updatedCount = result.rowsAffected || 0; + this.logger.info( + `Successfully migrated ${updatedCount} messages from default user_id to actual user_ids`, + ); + + const remainingResult = await this.client.execute({ + sql: `SELECT COUNT(*) as count FROM ${messagesTableName} WHERE user_id = 'default'`, + args: [], + }); + + const remainingCount = (remainingResult.rows[0]?.count as number) || 0; + + if (remainingCount > 0) { + this.logger.warn( + `${remainingCount} messages still have default user_id (possibly orphaned messages without valid conversations)`, + ); + } + }, "migrate default user_ids"); + } catch (error) { + this.logger.error("Failed to migrate default user_ids", error as Error); + } + } + + private async addWorkflowStateColumns(): Promise { + const workflowStatesTable = `${this.tablePrefix}_workflow_states`; + + try { + const tableInfo = await this.client.execute(`PRAGMA table_info(${workflowStatesTable})`); + const columns = tableInfo.rows.map((row) => row.name as string); + + if (!columns.includes("events")) { + try { + await this.client.execute(`ALTER TABLE ${workflowStatesTable} ADD COLUMN events TEXT`); + this.logger.debug("Added 'events' column to workflow_states table"); + } catch (_e) { + // Column might already exist + } + } + + if (!columns.includes("output")) { + try { + await this.client.execute(`ALTER TABLE ${workflowStatesTable} ADD COLUMN output TEXT`); + this.logger.debug("Added 'output' column to workflow_states table"); + } catch (_e) { + // Column might already exist + } + } + + if (!columns.includes("cancellation")) { + try { + await this.client.execute( + `ALTER TABLE ${workflowStatesTable} ADD COLUMN cancellation TEXT`, + ); + 
this.logger.debug("Added 'cancellation' column to workflow_states table"); + } catch (_e) { + // Column might already exist + } + } + } catch (error) { + this.logger.warn("Failed to add workflow state columns (non-critical)", error as Error); + } + } + + // ============================================================================ + // Message Operations + // ============================================================================ + + async addMessage(message: UIMessage, userId: string, conversationId: string): Promise { + await this.initialize(); + + const messagesTable = `${this.tablePrefix}_messages`; + + const conversation = await this.getConversation(conversationId); + if (!conversation) { + throw new ConversationNotFoundError(conversationId); + } + + await this.executeWithRetry(async () => { + await this.client.execute({ + sql: `INSERT INTO ${messagesTable} (conversation_id, message_id, user_id, role, parts, metadata, format_version, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, + args: [ + conversationId, + message.id, + userId, + message.role, + safeStringify(message.parts), + message.metadata ? safeStringify(message.metadata) : null, + 2, + new Date().toISOString(), + ], + }); + }, "add message"); + } + + async addMessages(messages: UIMessage[], userId: string, conversationId: string): Promise { + await this.initialize(); + + const messagesTable = `${this.tablePrefix}_messages`; + + const conversation = await this.getConversation(conversationId); + if (!conversation) { + throw new ConversationNotFoundError(conversationId); + } + + const now = new Date().toISOString(); + + await this.executeWithRetry(async () => { + await this.client.batch( + messages.map((message) => ({ + sql: `INSERT INTO ${messagesTable} (conversation_id, message_id, user_id, role, parts, metadata, format_version, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, + args: [ + conversationId, + message.id, + userId, + message.role, + safeStringify(message.parts), + message.metadata ? safeStringify(message.metadata) : null, + 2, + now, + ], + })), + ); + }, "add batch messages"); + } + + async saveConversationSteps(steps: ConversationStepRecord[]): Promise { + if (steps.length === 0) return; + + await this.initialize(); + const stepsTable = `${this.tablePrefix}_steps`; + + await this.executeWithRetry(async () => { + await this.client.batch( + steps.map((step) => { + const createdAt = step.createdAt ?? new Date().toISOString(); + return { + sql: `INSERT INTO ${stepsTable} ( + id, + conversation_id, + user_id, + agent_id, + agent_name, + operation_id, + step_index, + type, + role, + content, + arguments, + result, + usage, + sub_agent_id, + sub_agent_name, + created_at + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(id) DO UPDATE SET + conversation_id = excluded.conversation_id, + user_id = excluded.user_id, + agent_id = excluded.agent_id, + agent_name = excluded.agent_name, + operation_id = excluded.operation_id, + step_index = excluded.step_index, + type = excluded.type, + role = excluded.role, + content = excluded.content, + arguments = excluded.arguments, + result = excluded.result, + usage = excluded.usage, + sub_agent_id = excluded.sub_agent_id, + sub_agent_name = excluded.sub_agent_name, + created_at = excluded.created_at`, + args: [ + step.id, + step.conversationId, + step.userId, + step.agentId, + step.agentName ?? null, + step.operationId ?? null, + step.stepIndex, + step.type, + step.role, + step.content ?? null, + step.arguments ? 
safeStringify(step.arguments) : null, + step.result ? safeStringify(step.result) : null, + step.usage ? safeStringify(step.usage) : null, + step.subAgentId ?? null, + step.subAgentName ?? null, + createdAt, + ], + }; + }), + ); + }, "save conversation steps"); + } + + async getMessages( + userId: string, + conversationId: string, + options?: GetMessagesOptions, + ): Promise<UIMessage[]> { + await this.initialize(); + + const messagesTable = `${this.tablePrefix}_messages`; + const { limit, before, after, roles } = options || {}; + + let sql = `SELECT * FROM ${messagesTable} + WHERE conversation_id = ? AND user_id = ?`; + const args: any[] = [conversationId, userId]; + + if (roles && roles.length > 0) { + const placeholders = roles.map(() => "?").join(","); + sql += ` AND role IN (${placeholders})`; + args.push(...roles); + } + + if (before) { + sql += " AND created_at < ?"; + args.push(before.toISOString()); + } + + if (after) { + sql += " AND created_at > ?"; + args.push(after.toISOString()); + } + + sql += " ORDER BY created_at ASC"; + if (limit && limit > 0) { + sql += " LIMIT ?"; + args.push(limit); + } + + const result = await this.client.execute({ sql, args }); + + return result.rows.map((row) => { + let parts: any; + + if (row.parts !== undefined && row.parts !== null) { + try { + parts = JSON.parse(row.parts as string); + } catch { + parts = []; + } + } else if (row.content !== undefined && row.content !== null) { + try { + const content = JSON.parse(row.content as string); + + if (typeof content === "string") { + parts = [{ type: "text", text: content }]; + } else if (Array.isArray(content)) { + parts = content; + } else { + parts = []; + } + } catch { + parts = [{ type: "text", text: row.content as string }]; + } + } else { + parts = []; + } + + const metadata = row.metadata ? JSON.parse(row.metadata as string) : {}; + return { + id: row.message_id as string, + role: row.role as "system" | "user" | "assistant", + parts, + metadata: { + ...metadata, + createdAt: row.created_at ? new Date(row.created_at as string) : undefined, + }, + }; + }); + } + + async getConversationSteps( + userId: string, + conversationId: string, + options?: GetConversationStepsOptions, + ): Promise<ConversationStepRecord[]> { + await this.initialize(); + + const stepsTable = `${this.tablePrefix}_steps`; + const limit = options?.limit && options.limit > 0 ? options.limit : undefined; + + let sql = `SELECT * FROM ${stepsTable} WHERE conversation_id = ? AND user_id = ?`; + const args: any[] = [conversationId, userId]; + + if (options?.operationId) { + sql += " AND operation_id = ?"; + args.push(options.operationId); + } + + sql += " ORDER BY step_index ASC"; + if (limit !== undefined) { + sql += " LIMIT ?"; + args.push(limit); + } + + const result = await this.client.execute({ sql, args }); + + const parseJsonField = (value: unknown) => { + if (typeof value !== "string" || value.length === 0) { + return undefined; + } + try { + return JSON.parse(value); + } catch { + return undefined; + } + }; + + return result.rows.map((row) => ({ + id: row.id as string, + conversationId: row.conversation_id as string, + userId: row.user_id as string, + agentId: row.agent_id as string, + agentName: (row.agent_name as string) ?? undefined, + operationId: (row.operation_id as string) ?? undefined, + stepIndex: + typeof row.step_index === "number" + ? (row.step_index as number) + : Number(row.step_index ?? 0), + type: row.type as ConversationStepRecord["type"], + role: row.role as ConversationStepRecord["role"], + content: (row.content as string) ?? 
undefined, + arguments: parseJsonField(row.arguments), + result: parseJsonField(row.result), + usage: parseJsonField(row.usage), + subAgentId: (row.sub_agent_id as string) ?? undefined, + subAgentName: (row.sub_agent_name as string) ?? undefined, + createdAt: (row.created_at as string) ?? new Date().toISOString(), + })); + } + + async clearMessages(userId: string, conversationId?: string): Promise { + await this.initialize(); + + const messagesTable = `${this.tablePrefix}_messages`; + const conversationsTable = `${this.tablePrefix}_conversations`; + const stepsTable = `${this.tablePrefix}_steps`; + + if (conversationId) { + await this.client.execute({ + sql: `DELETE FROM ${messagesTable} WHERE conversation_id = ? AND user_id = ?`, + args: [conversationId, userId], + }); + await this.client.execute({ + sql: `DELETE FROM ${stepsTable} WHERE conversation_id = ? AND user_id = ?`, + args: [conversationId, userId], + }); + } else { + await this.client.execute({ + sql: `DELETE FROM ${messagesTable} + WHERE conversation_id IN ( + SELECT id FROM ${conversationsTable} WHERE user_id = ? + )`, + args: [userId], + }); + await this.client.execute({ + sql: `DELETE FROM ${stepsTable} + WHERE conversation_id IN ( + SELECT id FROM ${conversationsTable} WHERE user_id = ? + )`, + args: [userId], + }); + } + } + + // ============================================================================ + // Conversation Operations + // ============================================================================ + + async createConversation(input: CreateConversationInput): Promise { + await this.initialize(); + + const conversationsTable = `${this.tablePrefix}_conversations`; + + const existing = await this.getConversation(input.id); + if (existing) { + throw new ConversationAlreadyExistsError(input.id); + } + + const now = new Date().toISOString(); + + await this.executeWithRetry(async () => { + await this.client.execute({ + sql: `INSERT INTO ${conversationsTable} (id, resource_id, user_id, title, metadata, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?)`, + args: [ + input.id, + input.resourceId, + input.userId, + input.title, + safeStringify(input.metadata || {}), + now, + now, + ], + }); + }, "create conversation"); + + return { + id: input.id, + userId: input.userId, + resourceId: input.resourceId, + title: input.title, + metadata: input.metadata || {}, + createdAt: now, + updatedAt: now, + }; + } + + async getConversation(id: string): Promise { + await this.initialize(); + + const conversationsTable = `${this.tablePrefix}_conversations`; + + const result = await this.client.execute({ + sql: `SELECT * FROM ${conversationsTable} WHERE id = ?`, + args: [id], + }); + + if (result.rows.length === 0) { + return null; + } + + const row = result.rows[0]; + return { + id: row.id as string, + userId: row.user_id as string, + resourceId: row.resource_id as string, + title: row.title as string, + metadata: row.metadata ? JSON.parse(row.metadata as string) : {}, + createdAt: row.created_at as string, + updatedAt: row.updated_at as string, + }; + } + + async getConversations(resourceId: string): Promise { + await this.initialize(); + + const conversationsTable = `${this.tablePrefix}_conversations`; + + const result = await this.client.execute({ + sql: `SELECT * FROM ${conversationsTable} WHERE resource_id = ? 
ORDER BY updated_at DESC`, + args: [resourceId], + }); + + return result.rows.map((row) => ({ + id: row.id as string, + userId: row.user_id as string, + resourceId: row.resource_id as string, + title: row.title as string, + metadata: row.metadata ? JSON.parse(row.metadata as string) : {}, + createdAt: row.created_at as string, + updatedAt: row.updated_at as string, + })); + } + + async getConversationsByUserId( + userId: string, + options?: Omit<ConversationQueryOptions, "userId">, + ): Promise<Conversation[]> { + return this.queryConversations({ ...options, userId }); + } + + async queryConversations(options: ConversationQueryOptions): Promise<Conversation[]> { + await this.initialize(); + + const conversationsTable = `${this.tablePrefix}_conversations`; + let sql = `SELECT * FROM ${conversationsTable} WHERE 1=1`; + const args: any[] = []; + + if (options.userId) { + sql += " AND user_id = ?"; + args.push(options.userId); + } + + if (options.resourceId) { + sql += " AND resource_id = ?"; + args.push(options.resourceId); + } + + const orderBy = options.orderBy || "updated_at"; + const orderDirection = options.orderDirection || "DESC"; + sql += ` ORDER BY ${orderBy} ${orderDirection}`; + + if (options.limit) { + sql += " LIMIT ?"; + args.push(options.limit); + } + + if (options.offset) { + sql += " OFFSET ?"; + args.push(options.offset); + } + + const result = await this.client.execute({ sql, args }); + + return result.rows.map((row) => ({ + id: row.id as string, + userId: row.user_id as string, + resourceId: row.resource_id as string, + title: row.title as string, + metadata: row.metadata ? JSON.parse(row.metadata as string) : {}, + createdAt: row.created_at as string, + updatedAt: row.updated_at as string, + })); + } + + async updateConversation( + id: string, + updates: Partial<Omit<Conversation, "id" | "createdAt" | "updatedAt">>, + ): Promise<Conversation> { + await this.initialize(); + + const conversationsTable = `${this.tablePrefix}_conversations`; + const conversation = await this.getConversation(id); + if (!conversation) { + throw new ConversationNotFoundError(id); + } + + const now = new Date().toISOString(); + const fieldsToUpdate: string[] = ["updated_at = ?"]; + const args: any[] = [now]; + + if (updates.title !== undefined) { + fieldsToUpdate.push("title = ?"); + args.push(updates.title); + } + + if (updates.resourceId !== undefined) { + fieldsToUpdate.push("resource_id = ?"); + args.push(updates.resourceId); + } + + if (updates.metadata !== undefined) { + fieldsToUpdate.push("metadata = ?"); + args.push(safeStringify(updates.metadata)); + } + + args.push(id); + + await this.client.execute({ + sql: `UPDATE ${conversationsTable} SET ${fieldsToUpdate.join(", ")} WHERE id = ?`, + args, + }); + + const updated = await this.getConversation(id); + if (!updated) { + throw new Error(`Conversation not found after update: ${id}`); + } + return updated; + } + + async deleteConversation(id: string): Promise<void> { + await this.initialize(); + + const conversationsTable = `${this.tablePrefix}_conversations`; + + await this.client.execute({ + sql: `DELETE FROM ${conversationsTable} WHERE id = ?`, + args: [id], + }); + } + + // ============================================================================ + // Working Memory Operations + // ============================================================================ + + async getWorkingMemory(params: { + conversationId?: string; + userId?: string; + scope: WorkingMemoryScope; + }): Promise<string | null> { + await this.initialize(); + + if (params.scope === "conversation" && params.conversationId) { + const conversation = await this.getConversation(params.conversationId); + return (conversation?.metadata?.workingMemory as string) || null; + } + + if (params.scope === "user" && params.userId) { + const usersTable = `${this.tablePrefix}_users`; + const result = await this.client.execute({ + sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`, + args: [params.userId], + }); + + if (result.rows.length > 0) { + const metadata = result.rows[0].metadata + ? JSON.parse(result.rows[0].metadata as string) + : {}; + return metadata.workingMemory || null; + } + } + + return null; + }
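/**
 * Working memory lives in conversation metadata (scope: "conversation") or in the
 * users table (scope: "user"), selected by `scope`. A minimal sketch, assuming an
 * `adapter` instance built from this patch (ids and content are illustrative):
 *
 * @example
 * await adapter.setWorkingMemory({
 *   scope: "user",
 *   userId: "user-1",
 *   content: "Prefers concise answers.",
 * });
 * const memory = await adapter.getWorkingMemory({ scope: "user", userId: "user-1" });
 * // memory === "Prefers concise answers."
 */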
async setWorkingMemory(params: { + conversationId?: string; + userId?: string; + content: string; + scope: WorkingMemoryScope; + }): Promise<void> { + await this.initialize(); + + if (params.scope === "conversation" && params.conversationId) { + const conversation = await this.getConversation(params.conversationId); + if (!conversation) { + throw new ConversationNotFoundError(params.conversationId); + } + + const metadata = conversation.metadata || {}; + metadata.workingMemory = params.content; + + await this.updateConversation(params.conversationId, { metadata }); + } + + if (params.scope === "user" && params.userId) { + const usersTable = `${this.tablePrefix}_users`; + const now = new Date().toISOString(); + + const result = await this.client.execute({ + sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`, + args: [params.userId], + }); + + if (result.rows.length > 0) { + const metadata = result.rows[0].metadata + ? JSON.parse(result.rows[0].metadata as string) + : {}; + metadata.workingMemory = params.content; + + await this.client.execute({ + sql: `UPDATE ${usersTable} SET metadata = ?, updated_at = ? WHERE id = ?`, + args: [safeStringify(metadata), now, params.userId], + }); + } else { + await this.client.execute({ + sql: `INSERT INTO ${usersTable} (id, metadata, created_at, updated_at) VALUES (?, ?, ?, ?)`, + args: [params.userId, safeStringify({ workingMemory: params.content }), now, now], + }); + } + } + } + + async deleteWorkingMemory(params: { + conversationId?: string; + userId?: string; + scope: WorkingMemoryScope; + }): Promise<void> { + await this.initialize(); + + if (params.scope === "conversation" && params.conversationId) { + const conversation = await this.getConversation(params.conversationId); + if (conversation?.metadata?.workingMemory) { + const metadata = { ...conversation.metadata }; + // biome-ignore lint/performance/noDelete: + delete metadata.workingMemory; + await this.updateConversation(params.conversationId, { metadata }); + } + } + + if (params.scope === "user" && params.userId) { + const usersTable = `${this.tablePrefix}_users`; + const result = await this.client.execute({ + sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`, + args: [params.userId], + }); + + if (result.rows.length > 0 && result.rows[0].metadata) { + const metadata = JSON.parse(result.rows[0].metadata as string); + if (metadata.workingMemory) { + // biome-ignore lint/performance/noDelete: + delete metadata.workingMemory; + await this.client.execute({ + sql: `UPDATE ${usersTable} SET metadata = ?, updated_at = ? 
WHERE id = ?`, + args: [safeStringify(metadata), new Date().toISOString(), params.userId], + }); + } + } + } + } + + // ============================================================================ + // Workflow State Operations + // ============================================================================ + + async getWorkflowState(executionId: string): Promise { + await this.initialize(); + + const workflowStatesTable = `${this.tablePrefix}_workflow_states`; + const result = await this.client.execute({ + sql: `SELECT * FROM ${workflowStatesTable} WHERE id = ?`, + args: [executionId], + }); + + if (result.rows.length === 0) { + return null; + } + + const row = result.rows[0]; + return { + id: row.id as string, + workflowId: row.workflow_id as string, + workflowName: row.workflow_name as string, + status: row.status as "running" | "suspended" | "completed" | "error", + suspension: row.suspension ? JSON.parse(row.suspension as string) : undefined, + events: row.events ? JSON.parse(row.events as string) : undefined, + output: row.output ? JSON.parse(row.output as string) : undefined, + cancellation: row.cancellation ? JSON.parse(row.cancellation as string) : undefined, + userId: row.user_id as string | undefined, + conversationId: row.conversation_id as string | undefined, + metadata: row.metadata ? JSON.parse(row.metadata as string) : undefined, + createdAt: new Date(row.created_at as string), + updatedAt: new Date(row.updated_at as string), + }; + } + + async queryWorkflowRuns(query: { + workflowId?: string; + status?: WorkflowStateEntry["status"]; + from?: Date; + to?: Date; + limit?: number; + offset?: number; + }): Promise { + await this.initialize(); + + const workflowStatesTable = `${this.tablePrefix}_workflow_states`; + const conditions: string[] = []; + const args: any[] = []; + + if (query.workflowId) { + conditions.push("workflow_id = ?"); + args.push(query.workflowId); + } + + if (query.status) { + conditions.push("status = ?"); + args.push(query.status); + } + + if (query.from) { + conditions.push("created_at >= ?"); + args.push(query.from.toISOString()); + } + + if (query.to) { + conditions.push("created_at <= ?"); + args.push(query.to.toISOString()); + } + + let sql = `SELECT * FROM ${workflowStatesTable}`; + if (conditions.length > 0) { + sql += ` WHERE ${conditions.join(" AND ")}`; + } + sql += " ORDER BY created_at DESC"; + + if (query.limit !== undefined) { + sql += " LIMIT ?"; + args.push(query.limit); + } + + if (query.offset !== undefined) { + sql += " OFFSET ?"; + args.push(query.offset); + } + + const result = await this.client.execute({ + sql, + args, + }); + + return result.rows.map((row) => ({ + id: row.id as string, + workflowId: row.workflow_id as string, + workflowName: row.workflow_name as string, + status: row.status as WorkflowStateEntry["status"], + suspension: row.suspension ? JSON.parse(row.suspension as string) : undefined, + events: row.events ? JSON.parse(row.events as string) : undefined, + output: row.output ? JSON.parse(row.output as string) : undefined, + cancellation: row.cancellation ? JSON.parse(row.cancellation as string) : undefined, + userId: row.user_id as string | undefined, + conversationId: row.conversation_id as string | undefined, + metadata: row.metadata ? 
JSON.parse(row.metadata as string) : undefined, + createdAt: new Date(row.created_at as string), + updatedAt: new Date(row.updated_at as string), + })); + } + + async setWorkflowState(executionId: string, state: WorkflowStateEntry): Promise<void> { + await this.initialize(); + + const workflowStatesTable = `${this.tablePrefix}_workflow_states`; + await this.client.execute({ + sql: `INSERT OR REPLACE INTO ${workflowStatesTable} + (id, workflow_id, workflow_name, status, suspension, events, output, cancellation, user_id, conversation_id, metadata, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, + args: [ + executionId, + state.workflowId, + state.workflowName, + state.status, + state.suspension ? safeStringify(state.suspension) : null, + state.events ? safeStringify(state.events) : null, + state.output ? safeStringify(state.output) : null, + state.cancellation ? safeStringify(state.cancellation) : null, + state.userId || null, + state.conversationId || null, + state.metadata ? safeStringify(state.metadata) : null, + state.createdAt.toISOString(), + state.updatedAt.toISOString(), + ], + }); + } + + async updateWorkflowState( + executionId: string, + updates: Partial<WorkflowStateEntry>, + ): Promise<void> { + await this.initialize(); + + const existing = await this.getWorkflowState(executionId); + if (!existing) { + throw new Error(`Workflow state ${executionId} not found`); + } + + const updated: WorkflowStateEntry = { + ...existing, + ...updates, + updatedAt: new Date(), + }; + + await this.setWorkflowState(executionId, updated); + } + + async getSuspendedWorkflowStates(workflowId: string): Promise<WorkflowStateEntry[]> { + await this.initialize(); + + const workflowStatesTable = `${this.tablePrefix}_workflow_states`; + const result = await this.client.execute({ + sql: `SELECT * FROM ${workflowStatesTable} WHERE workflow_id = ? AND status = 'suspended' ORDER BY created_at DESC`, + args: [workflowId], + }); + + return result.rows.map((row) => ({ + id: row.id as string, + workflowId: row.workflow_id as string, + workflowName: row.workflow_name as string, + status: "suspended" as const, + suspension: row.suspension ? JSON.parse(row.suspension as string) : undefined, + events: row.events ? JSON.parse(row.events as string) : undefined, + output: row.output ? JSON.parse(row.output as string) : undefined, + cancellation: row.cancellation ? JSON.parse(row.cancellation as string) : undefined, + userId: row.user_id as string | undefined, + conversationId: row.conversation_id as string | undefined, + metadata: row.metadata ? JSON.parse(row.metadata as string) : undefined, + createdAt: new Date(row.created_at as string), + updatedAt: new Date(row.updated_at as string), + })); + }
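// A minimal sketch of the suspend/resume flow these state APIs support, assuming
// an `adapter` instance built from this patch (the workflow id is illustrative):
//
//   const suspended = await adapter.getSuspendedWorkflowStates("order-workflow");
//   for (const run of suspended) {
//     await adapter.updateWorkflowState(run.id, { status: "running" });
//   }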
async close(): Promise<void> { + this.logger.debug("Closing LibSQL Memory adapter"); + } +} diff --git a/packages/libsql/src/memory-v2-adapter-edge.ts b/packages/libsql/src/memory-v2-adapter-edge.ts new file mode 100644 index 000000000..e3b387b41 --- /dev/null +++ b/packages/libsql/src/memory-v2-adapter-edge.ts @@ -0,0 +1,82 @@ +/** + * LibSQL Storage Adapter for Memory - Edge/Cloudflare Workers + * Stores conversations and messages in remote Turso database + * Uses @libsql/client/web for edge runtime compatibility + */ + +import { createClient } from "@libsql/client/web"; +import { AgentRegistry } from "@voltagent/core"; +import { createPinoLogger } from "@voltagent/logger"; +import type { Logger } from "@voltagent/logger"; +import { LibSQLMemoryCore, type LibSQLMemoryCoreOptions } from "./memory-core"; + +/** + * LibSQL configuration options for Memory (Edge) + */ +export interface LibSQLMemoryEdgeOptions extends LibSQLMemoryCoreOptions { + /** + * Database URL - must be a remote Turso URL (libsql://) + * File-based URLs are not supported in edge environments + */ + url: string; + + /** + * Auth token for remote connections (required for Turso) + */ + authToken: string; + + /** + * Enable debug logging + * @default false + */ + debug?: boolean; + + /** + * Logger instance + */ + logger?: Logger; +} + +/** + * LibSQL Storage Adapter for Memory - Edge Compatible + * Production-ready storage for conversations and messages + * Only supports remote Turso databases (libsql://) + */ +export class LibSQLMemoryAdapterEdge extends LibSQLMemoryCore { + constructor(options: LibSQLMemoryEdgeOptions) { + // Validate URL - edge only supports remote URLs + if (!options.url) { + throw new Error("LibSQLMemoryAdapterEdge requires a url option"); + } + + if ( + options.url.startsWith("file:") || + options.url === ":memory:" || + !options.url.startsWith("libsql://") + ) { + throw new Error( + "LibSQLMemoryAdapterEdge only supports remote Turso URLs (libsql://). " + "File-based databases are not supported in edge environments. " + + "Use LibSQLMemoryAdapter from '@voltagent/libsql' for Node.js environments.", + ); + } + + if (!options.authToken) { + throw new Error("LibSQLMemoryAdapterEdge requires an authToken for remote connections"); + } + + // Initialize logger + const logger = + options.logger || + AgentRegistry.getInstance().getGlobalLogger() || + createPinoLogger({ name: "libsql-memory-edge" }); + + // Create LibSQL client using web-compatible import + const client = createClient({ + url: options.url, + authToken: options.authToken, + }); + + super(client, options.url, options, logger); + } +}
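For context, a minimal Cloudflare Workers handler wired to the edge adapter could look like the sketch below. The `TURSO_URL` and `TURSO_AUTH_TOKEN` binding names are illustrative, not part of this patch:

```typescript
import { LibSQLMemoryAdapter } from "@voltagent/libsql/edge";

interface Env {
  TURSO_URL: string; // e.g. "libsql://your-db.turso.io"
  TURSO_AUTH_TOKEN: string;
}

export default {
  async fetch(_request: Request, env: Env): Promise<Response> {
    const adapter = new LibSQLMemoryAdapter({
      url: env.TURSO_URL,
      authToken: env.TURSO_AUTH_TOKEN,
    });
    // getConversations(resourceId) comes from the shared memory core above.
    const conversations = await adapter.getConversations("my-app");
    return Response.json({ count: conversations.length });
  },
};
```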
" + + "Use LibSQLMemoryAdapter from '@voltagent/libsql' for Node.js environments.", + ); + } + + if (!options.authToken) { + throw new Error("LibSQLMemoryAdapterEdge requires an authToken for remote connections"); + } + + // Initialize logger + const logger = + options.logger || + AgentRegistry.getInstance().getGlobalLogger() || + createPinoLogger({ name: "libsql-memory-edge" }); + + // Create LibSQL client using web-compatible import + const client = createClient({ + url: options.url, + authToken: options.authToken, + }); + + super(client, options.url, options, logger); + } +} diff --git a/packages/libsql/src/memory-v2-adapter.ts b/packages/libsql/src/memory-v2-adapter.ts index f837295ad..84a195407 100644 --- a/packages/libsql/src/memory-v2-adapter.ts +++ b/packages/libsql/src/memory-v2-adapter.ts @@ -1,36 +1,21 @@ /** - * LibSQL Storage Adapter for Memory + * LibSQL Storage Adapter for Memory - Node.js * Stores conversations and messages in SQLite/Turso database - * Compatible with existing LibSQL storage structure + * Supports both local file databases and remote Turso connections */ import fs from "node:fs"; import path from "node:path"; -import { type Client, createClient } from "@libsql/client"; -import { - AgentRegistry, - ConversationAlreadyExistsError, - ConversationNotFoundError, -} from "@voltagent/core"; -import type { - Conversation, - ConversationQueryOptions, - ConversationStepRecord, - CreateConversationInput, - GetConversationStepsOptions, - GetMessagesOptions, - StorageAdapter, - WorkflowStateEntry, - WorkingMemoryScope, -} from "@voltagent/core"; -import { safeStringify } from "@voltagent/internal"; -import { type Logger, createPinoLogger } from "@voltagent/logger"; -import type { UIMessage } from "ai"; +import { createClient } from "@libsql/client"; +import { AgentRegistry } from "@voltagent/core"; +import { createPinoLogger } from "@voltagent/logger"; +import type { Logger } from "@voltagent/logger"; +import { LibSQLMemoryCore, type LibSQLMemoryCoreOptions } from "./memory-core"; /** * LibSQL configuration options for Memory */ -export interface LibSQLMemoryOptions { +export interface LibSQLMemoryOptions extends LibSQLMemoryCoreOptions { /** * Database URL (e.g., 'file:./conversations.db' or 'libsql://...') * @default "file:./.voltagent/memory.db" @@ -42,12 +27,6 @@ export interface LibSQLMemoryOptions { */ authToken?: string; - /** - * Prefix for table names - * @default "voltagent_memory" - */ - tablePrefix?: string; - /** * Enable debug logging * @default false @@ -58,1371 +37,39 @@ export interface LibSQLMemoryOptions { * Logger instance */ logger?: Logger; - - /** - * Maximum number of retries for database operations - * @default 3 - */ - maxRetries?: number; - - /** - * Initial retry delay in milliseconds - * @default 100 - */ - retryDelayMs?: number; } /** - * LibSQL Storage Adapter for Memory + * LibSQL Storage Adapter for Memory - Node.js * Production-ready storage for conversations and messages - * Compatible with existing LibSQL storage structure + * Supports both local SQLite files and remote Turso databases */ -export class LibSQLMemoryAdapter implements StorageAdapter { - private client: Client; - private tablePrefix: string; - private initialized = false; - private logger: Logger; - private maxRetries: number; - private retryDelayMs: number; - private url: string; - +export class LibSQLMemoryAdapter extends LibSQLMemoryCore { constructor(options: LibSQLMemoryOptions = {}) { - this.tablePrefix = options.tablePrefix ?? 
"voltagent_memory"; - this.maxRetries = options.maxRetries ?? 3; - this.retryDelayMs = options.retryDelayMs ?? 100; + const url = options.url ?? "file:./.voltagent/memory.db"; // Initialize logger - use provided logger, global logger, or create new one - this.logger = + const logger = options.logger || AgentRegistry.getInstance().getGlobalLogger() || createPinoLogger({ name: "libsql-memory" }); - this.url = options.url ?? "file:./.voltagent/memory.db"; - // Create directory for file-based databases - if (this.url.startsWith("file:")) { - const dbPath = this.url.replace("file:", ""); + if (url.startsWith("file:")) { + const dbPath = url.replace("file:", ""); const dbDir = path.dirname(dbPath); if (dbDir && dbDir !== "." && !fs.existsSync(dbDir)) { fs.mkdirSync(dbDir, { recursive: true }); - this.logger.debug(`Created database directory: ${dbDir}`); + logger.debug(`Created database directory: ${dbDir}`); } } // Create LibSQL client - this.client = createClient({ - url: this.url, + const client = createClient({ + url: url, authToken: options.authToken, }); - this.logger.debug("LibSQL Memory adapter initialized", { url: this.url }); - } - - /** - * Execute a database operation with retry logic - */ - private async executeWithRetry( - operation: () => Promise, - operationName: string, - ): Promise { - let lastError: Error | undefined; - - for (let attempt = 0; attempt < this.maxRetries; attempt++) { - try { - return await operation(); - } catch (error: any) { - lastError = error; - - // Check if error is retryable (SQLITE_BUSY, etc.) - if ( - error?.code === "SQLITE_BUSY" || - error?.message?.includes("SQLITE_BUSY") || - error?.message?.includes("database is locked") - ) { - const delay = this.retryDelayMs * 2 ** attempt; // Exponential backoff - this.logger.debug( - `Database busy, retrying ${operationName} (attempt ${attempt + 1}/${this.maxRetries}) after ${delay}ms`, - ); - await new Promise((resolve) => setTimeout(resolve, delay)); - } else { - // Non-retryable error, throw immediately - throw error; - } - } - } - - // All retries exhausted - this.logger.error( - `Failed to execute ${operationName} after ${this.maxRetries} attempts`, - lastError, - ); - throw lastError; - } - - /** - * Initialize database schema - */ - private async initialize(): Promise { - if (this.initialized) return; - - const conversationsTable = `${this.tablePrefix}_conversations`; - const messagesTable = `${this.tablePrefix}_messages`; - const usersTable = `${this.tablePrefix}_users`; - const workflowStatesTable = `${this.tablePrefix}_workflow_states`; - const stepsTable = `${this.tablePrefix}_steps`; - - // Set PRAGMA settings for better concurrency - // Execute individually to handle errors gracefully - const isMemoryDb = this.url === ":memory:" || this.url.includes("mode=memory"); - - // Only set WAL mode for file-based databases (not for in-memory) - if (!isMemoryDb && (this.url.startsWith("file:") || this.url.startsWith("libsql:"))) { - try { - await this.client.execute("PRAGMA journal_mode=WAL"); - this.logger.debug("Set PRAGMA journal_mode=WAL"); - } catch (err) { - this.logger.debug("Failed to set PRAGMA journal_mode=WAL (non-critical)", { err }); - } - } - - // Set busy timeout (works for both memory and file databases) - try { - await this.client.execute("PRAGMA busy_timeout=5000"); - this.logger.debug("Set PRAGMA busy_timeout=5000"); - } catch (err) { - this.logger.debug("Failed to set PRAGMA busy_timeout (non-critical)", { err }); - } - - // Enable foreign keys (works for both memory and file databases) - 
try { - await this.client.execute("PRAGMA foreign_keys=ON"); - this.logger.debug("Set PRAGMA foreign_keys=ON"); - } catch (err) { - this.logger.debug("Failed to set PRAGMA foreign_keys (non-critical)", { err }); - } - - this.logger.debug("Applied PRAGMA settings for better concurrency"); - - await this.executeWithRetry(async () => { - await this.client.batch([ - // Create users table (for user-level working memory) - `CREATE TABLE IF NOT EXISTS ${usersTable} ( - id TEXT PRIMARY KEY, - metadata TEXT, - created_at TEXT DEFAULT CURRENT_TIMESTAMP, - updated_at TEXT DEFAULT CURRENT_TIMESTAMP - )`, - - // Create conversations table (matching existing structure) - `CREATE TABLE IF NOT EXISTS ${conversationsTable} ( - id TEXT PRIMARY KEY, - resource_id TEXT NOT NULL, - user_id TEXT NOT NULL, - title TEXT NOT NULL, - metadata TEXT NOT NULL, - created_at TEXT NOT NULL, - updated_at TEXT NOT NULL - )`, - - // Create messages table (matching existing structure) - `CREATE TABLE IF NOT EXISTS ${messagesTable} ( - conversation_id TEXT NOT NULL, - message_id TEXT NOT NULL, - user_id TEXT NOT NULL, - role TEXT NOT NULL, - parts TEXT NOT NULL, - metadata TEXT, - format_version INTEGER DEFAULT 2, - created_at TEXT NOT NULL, - PRIMARY KEY (conversation_id, message_id) - )`, - - // Create workflow states table - `CREATE TABLE IF NOT EXISTS ${workflowStatesTable} ( - id TEXT PRIMARY KEY, - workflow_id TEXT NOT NULL, - workflow_name TEXT NOT NULL, - status TEXT NOT NULL, - suspension TEXT, - events TEXT, - output TEXT, - cancellation TEXT, - user_id TEXT, - conversation_id TEXT, - metadata TEXT, - created_at TEXT NOT NULL, - updated_at TEXT NOT NULL - )`, - - // Create conversation steps table - `CREATE TABLE IF NOT EXISTS ${stepsTable} ( - id TEXT PRIMARY KEY, - conversation_id TEXT NOT NULL, - user_id TEXT NOT NULL, - agent_id TEXT NOT NULL, - agent_name TEXT, - operation_id TEXT, - step_index INTEGER NOT NULL, - type TEXT NOT NULL, - role TEXT NOT NULL, - content TEXT, - arguments TEXT, - result TEXT, - usage TEXT, - sub_agent_id TEXT, - sub_agent_name TEXT, - created_at TEXT NOT NULL, - FOREIGN KEY (conversation_id) REFERENCES ${conversationsTable}(id) ON DELETE CASCADE - )`, - - // Create indexes for better performance - `CREATE INDEX IF NOT EXISTS idx_${conversationsTable}_user_id ON ${conversationsTable}(user_id)`, - `CREATE INDEX IF NOT EXISTS idx_${conversationsTable}_resource_id ON ${conversationsTable}(resource_id)`, - `CREATE INDEX IF NOT EXISTS idx_${messagesTable}_conversation_id ON ${messagesTable}(conversation_id)`, - `CREATE INDEX IF NOT EXISTS idx_${messagesTable}_created_at ON ${messagesTable}(created_at)`, - `CREATE INDEX IF NOT EXISTS idx_${workflowStatesTable}_workflow_id ON ${workflowStatesTable}(workflow_id)`, - `CREATE INDEX IF NOT EXISTS idx_${workflowStatesTable}_status ON ${workflowStatesTable}(status)`, - `CREATE INDEX IF NOT EXISTS idx_${stepsTable}_conversation ON ${stepsTable}(conversation_id, step_index)`, - `CREATE INDEX IF NOT EXISTS idx_${stepsTable}_operation ON ${stepsTable}(conversation_id, operation_id)`, - ]); - }, "initialize database schema"); - - // Add V2 columns to existing messages table if needed - await this.addV2ColumnsToMessagesTable(); - - // Migrate default user_id values to actual values from conversations - await this.migrateDefaultUserIds(); - - // Add new workflow state columns for event persistence - await this.addWorkflowStateColumns(); - - this.initialized = true; - this.logger.debug("Database schema initialized"); - } - - /** - * Add new columns to 
messages table for V2 format if they don't exist - * This allows existing tables to support both old and new message formats - */ - private async addV2ColumnsToMessagesTable(): Promise { - const messagesTableName = `${this.tablePrefix}_messages`; - - try { - // Check which columns exist - const tableInfo = await this.client.execute(`PRAGMA table_info(${messagesTableName})`); - const columns = tableInfo.rows.map((row) => row.name as string); - - // Step 1: Add new V2 columns if they don't exist - if (!columns.includes("parts")) { - try { - await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN parts TEXT`); - } catch (_e) { - // Column might already exist - } - } - - if (!columns.includes("metadata")) { - try { - await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN metadata TEXT`); - } catch (_e) { - // Column might already exist - } - } - - if (!columns.includes("format_version")) { - try { - await this.client.execute( - `ALTER TABLE ${messagesTableName} ADD COLUMN format_version INTEGER DEFAULT 2`, - ); - } catch (_e) { - // Column might already exist - } - } - - if (!columns.includes("user_id")) { - try { - await this.client.execute( - `ALTER TABLE ${messagesTableName} ADD COLUMN user_id TEXT NOT NULL DEFAULT 'default'`, - ); - } catch (_e) { - // Column might already exist - } - } - - // Step 2: Migrate old columns to nullable versions if they exist - // Check if content needs migration (check for NOT NULL constraint) - const contentInfo = tableInfo.rows.find((row) => row.name === "content"); - if (contentInfo && contentInfo.notnull === 1) { - try { - // Create nullable temp column - await this.client.execute( - `ALTER TABLE ${messagesTableName} ADD COLUMN content_temp TEXT`, - ); - - // Copy data - await this.client.execute( - `UPDATE ${messagesTableName} SET content_temp = content WHERE content IS NOT NULL`, - ); - - // Try to drop old column (SQLite 3.35.0+) - try { - await this.client.execute(`ALTER TABLE ${messagesTableName} DROP COLUMN content`); - - // If drop succeeded, rename temp to original - await this.client.execute( - `ALTER TABLE ${messagesTableName} RENAME COLUMN content_temp TO content`, - ); - } catch (_) { - // If DROP not supported, keep both columns - // Silent fail - not critical - } - } catch (_) { - // Content migration error - not critical - } - } - - // Same for type column - const typeInfo = tableInfo.rows.find((row) => row.name === "type"); - if (typeInfo && typeInfo.notnull === 1) { - try { - // Create nullable temp column - await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN type_temp TEXT`); - - // Copy data - await this.client.execute( - `UPDATE ${messagesTableName} SET type_temp = type WHERE type IS NOT NULL`, - ); - - // Try to drop old column (SQLite 3.35.0+) - try { - await this.client.execute(`ALTER TABLE ${messagesTableName} DROP COLUMN type`); - - // If drop succeeded, rename temp to original - await this.client.execute( - `ALTER TABLE ${messagesTableName} RENAME COLUMN type_temp TO type`, - ); - } catch (_) { - // If DROP not supported, keep both columns - // Silent fail - not critical - } - } catch (_) { - // Type migration error - not critical - } - } - } catch (_) { - // Don't throw - this is not critical for new installations - } - } - - /** - * Migrate default user_id values in messages table - * Updates messages with user_id='default' to use the actual user_id from their conversation - */ - private async migrateDefaultUserIds(): Promise { - const messagesTableName = 
`${this.tablePrefix}_messages`; - const conversationsTableName = `${this.tablePrefix}_conversations`; - - try { - // First, check if there are any messages with default user_id - const checkResult = await this.client.execute({ - sql: `SELECT COUNT(*) as count FROM ${messagesTableName} WHERE user_id = 'default'`, - args: [], - }); - - const defaultCount = (checkResult.rows[0]?.count as number) || 0; - - if (defaultCount === 0) { - return; - } - - this.logger.debug(`Found ${defaultCount} messages with default user_id, starting migration`); - - // Update messages with the actual user_id from their conversation - // Using a JOIN to get the user_id from the conversations table - await this.executeWithRetry(async () => { - const result = await this.client.execute({ - sql: `UPDATE ${messagesTableName} - SET user_id = ( - SELECT c.user_id - FROM ${conversationsTableName} c - WHERE c.id = ${messagesTableName}.conversation_id - ) - WHERE user_id = 'default' - AND EXISTS ( - SELECT 1 - FROM ${conversationsTableName} c - WHERE c.id = ${messagesTableName}.conversation_id - )`, - args: [], - }); - - const updatedCount = result.rowsAffected || 0; - this.logger.info( - `Successfully migrated ${updatedCount} messages from default user_id to actual user_ids`, - ); - - // Check if there are any remaining messages with default user_id (orphaned messages) - const remainingResult = await this.client.execute({ - sql: `SELECT COUNT(*) as count FROM ${messagesTableName} WHERE user_id = 'default'`, - args: [], - }); - - const remainingCount = (remainingResult.rows[0]?.count as number) || 0; - - if (remainingCount > 0) { - this.logger.warn( - `${remainingCount} messages still have default user_id (possibly orphaned messages without valid conversations)`, - ); - } - }, "migrate default user_ids"); - } catch (error) { - // Log the error but don't throw - this migration is not critical - this.logger.error("Failed to migrate default user_ids", error as Error); - } - } - - /** - * Add new columns to workflow_states table for event persistence - * This migration adds support for events, output, and cancellation tracking - */ - private async addWorkflowStateColumns(): Promise { - const workflowStatesTable = `${this.tablePrefix}_workflow_states`; - - try { - // Check which columns exist - const tableInfo = await this.client.execute(`PRAGMA table_info(${workflowStatesTable})`); - const columns = tableInfo.rows.map((row) => row.name as string); - - // Add events column if it doesn't exist - if (!columns.includes("events")) { - try { - await this.client.execute(`ALTER TABLE ${workflowStatesTable} ADD COLUMN events TEXT`); - this.logger.debug("Added 'events' column to workflow_states table"); - } catch (_e) { - // Column might already exist - } - } - - // Add output column if it doesn't exist - if (!columns.includes("output")) { - try { - await this.client.execute(`ALTER TABLE ${workflowStatesTable} ADD COLUMN output TEXT`); - this.logger.debug("Added 'output' column to workflow_states table"); - } catch (_e) { - // Column might already exist - } - } - - // Add cancellation column if it doesn't exist - if (!columns.includes("cancellation")) { - try { - await this.client.execute( - `ALTER TABLE ${workflowStatesTable} ADD COLUMN cancellation TEXT`, - ); - this.logger.debug("Added 'cancellation' column to workflow_states table"); - } catch (_e) { - // Column might already exist - } - } - } catch (error) { - // Log the error but don't throw - existing deployments without these columns will still work - this.logger.warn("Failed to 
add workflow state columns (non-critical)", error as Error); - } - } - - // ============================================================================ - // Message Operations - // ============================================================================ - - /** - * Add a single message - */ - async addMessage(message: UIMessage, userId: string, conversationId: string): Promise { - await this.initialize(); - - const messagesTable = `${this.tablePrefix}_messages`; - - // Ensure conversation exists - const conversation = await this.getConversation(conversationId); - if (!conversation) { - throw new ConversationNotFoundError(conversationId); - } - - // Insert message - await this.executeWithRetry(async () => { - await this.client.execute({ - sql: `INSERT INTO ${messagesTable} (conversation_id, message_id, user_id, role, parts, metadata, format_version, created_at) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, - args: [ - conversationId, - message.id, - userId, - message.role, - safeStringify(message.parts), - message.metadata ? safeStringify(message.metadata) : null, - 2, // format_version - new Date().toISOString(), - ], - }); - }, "add message"); - } - - /** - * Add multiple messages - */ - async addMessages(messages: UIMessage[], userId: string, conversationId: string): Promise { - await this.initialize(); - - const messagesTable = `${this.tablePrefix}_messages`; - - // Ensure conversation exists - const conversation = await this.getConversation(conversationId); - if (!conversation) { - throw new ConversationNotFoundError(conversationId); - } - - const now = new Date().toISOString(); - - // Use transaction for batch insert - await this.executeWithRetry(async () => { - await this.client.batch( - messages.map((message) => ({ - sql: `INSERT INTO ${messagesTable} (conversation_id, message_id, user_id, role, parts, metadata, format_version, created_at) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, - args: [ - conversationId, - message.id, - userId, - message.role, - safeStringify(message.parts), - message.metadata ? safeStringify(message.metadata) : null, - 2, // format_version - now, - ], - })), - ); - }, "add batch messages"); - } - - async saveConversationSteps(steps: ConversationStepRecord[]): Promise { - if (steps.length === 0) return; - - await this.initialize(); - const stepsTable = `${this.tablePrefix}_steps`; - - await this.executeWithRetry(async () => { - await this.client.batch( - steps.map((step) => { - const createdAt = step.createdAt ?? new Date().toISOString(); - return { - sql: `INSERT INTO ${stepsTable} ( - id, - conversation_id, - user_id, - agent_id, - agent_name, - operation_id, - step_index, - type, - role, - content, - arguments, - result, - usage, - sub_agent_id, - sub_agent_name, - created_at - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - ON CONFLICT(id) DO UPDATE SET - conversation_id = excluded.conversation_id, - user_id = excluded.user_id, - agent_id = excluded.agent_id, - agent_name = excluded.agent_name, - operation_id = excluded.operation_id, - step_index = excluded.step_index, - type = excluded.type, - role = excluded.role, - content = excluded.content, - arguments = excluded.arguments, - result = excluded.result, - usage = excluded.usage, - sub_agent_id = excluded.sub_agent_id, - sub_agent_name = excluded.sub_agent_name, - created_at = excluded.created_at`, - args: [ - step.id, - step.conversationId, - step.userId, - step.agentId, - step.agentName ?? null, - step.operationId ?? null, - step.stepIndex, - step.type, - step.role, - step.content ?? 
null, - step.arguments ? safeStringify(step.arguments) : null, - step.result ? safeStringify(step.result) : null, - step.usage ? safeStringify(step.usage) : null, - step.subAgentId ?? null, - step.subAgentName ?? null, - createdAt, - ], - }; - }), - ); - }, "save conversation steps"); - } - - /** - * Get messages with optional filtering - */ - async getMessages( - userId: string, - conversationId: string, - options?: GetMessagesOptions, - ): Promise[]> { - await this.initialize(); - - const messagesTable = `${this.tablePrefix}_messages`; - const { limit, before, after, roles } = options || {}; - - // Build query with filters - use SELECT * to handle both old and new schemas safely - let sql = `SELECT * FROM ${messagesTable} - WHERE conversation_id = ? AND user_id = ?`; - const args: any[] = [conversationId, userId]; - - // Add role filter - if (roles && roles.length > 0) { - const placeholders = roles.map(() => "?").join(","); - sql += ` AND role IN (${placeholders})`; - args.push(...roles); - } - - // Add time filters - if (before) { - sql += " AND created_at < ?"; - args.push(before.toISOString()); - } - - if (after) { - sql += " AND created_at > ?"; - args.push(after.toISOString()); - } - - // Order by creation time and apply limit - sql += " ORDER BY created_at ASC"; - if (limit && limit > 0) { - sql += " LIMIT ?"; - args.push(limit); - } - - const result = await this.client.execute({ sql, args }); - - // Convert rows to UIMessages with on-the-fly migration for old format - return result.rows.map((row) => { - // Determine parts based on whether we have new format (parts) or old format (content) - let parts: any; - - // Check for new format first (parts column exists and has value) - if (row.parts !== undefined && row.parts !== null) { - // New format - parse parts directly - try { - parts = JSON.parse(row.parts as string); - } catch { - parts = []; - } - } - // Check for old format (content column exists and has value) - else if (row.content !== undefined && row.content !== null) { - // Old format - convert content to parts - try { - const content = JSON.parse(row.content as string); - - if (typeof content === "string") { - // Simple string content -> text part - parts = [{ type: "text", text: content }]; - } else if (Array.isArray(content)) { - // Already an array of parts (old BaseMessage format with MessageContent array) - parts = content; - } else { - // Unknown format - fallback to empty - parts = []; - } - } catch { - // If parsing fails, treat as plain text - parts = [{ type: "text", text: row.content as string }]; - } - } else { - // No content at all - empty parts - parts = []; - } - - const metadata = row.metadata ? JSON.parse(row.metadata as string) : {}; - return { - id: row.message_id as string, - role: row.role as "system" | "user" | "assistant", - parts, - metadata: { - ...metadata, - createdAt: row.created_at ? new Date(row.created_at as string) : undefined, - }, - }; - }); - } - - async getConversationSteps( - userId: string, - conversationId: string, - options?: GetConversationStepsOptions, - ): Promise { - await this.initialize(); - - const stepsTable = `${this.tablePrefix}_steps`; - const limit = options?.limit && options.limit > 0 ? options.limit : undefined; - - let sql = `SELECT * FROM ${stepsTable} WHERE conversation_id = ? 
AND user_id = ?`; - const args: any[] = [conversationId, userId]; - - if (options?.operationId) { - sql += " AND operation_id = ?"; - args.push(options.operationId); - } - - sql += " ORDER BY step_index ASC"; - if (limit !== undefined) { - sql += " LIMIT ?"; - args.push(limit); - } - - const result = await this.client.execute({ sql, args }); - - const parseJsonField = (value: unknown) => { - if (typeof value !== "string" || value.length === 0) { - return undefined; - } - try { - return JSON.parse(value); - } catch { - return undefined; - } - }; - - return result.rows.map((row) => ({ - id: row.id as string, - conversationId: row.conversation_id as string, - userId: row.user_id as string, - agentId: row.agent_id as string, - agentName: (row.agent_name as string) ?? undefined, - operationId: (row.operation_id as string) ?? undefined, - stepIndex: - typeof row.step_index === "number" - ? (row.step_index as number) - : Number(row.step_index ?? 0), - type: row.type as ConversationStepRecord["type"], - role: row.role as ConversationStepRecord["role"], - content: (row.content as string) ?? undefined, - arguments: parseJsonField(row.arguments), - result: parseJsonField(row.result), - usage: parseJsonField(row.usage), - subAgentId: (row.sub_agent_id as string) ?? undefined, - subAgentName: (row.sub_agent_name as string) ?? undefined, - createdAt: (row.created_at as string) ?? new Date().toISOString(), - })); - } - - /** - * Clear messages for a user - */ - async clearMessages(userId: string, conversationId?: string): Promise { - await this.initialize(); - - const messagesTable = `${this.tablePrefix}_messages`; - const conversationsTable = `${this.tablePrefix}_conversations`; - const stepsTable = `${this.tablePrefix}_steps`; - - if (conversationId) { - // Clear messages for specific conversation - await this.client.execute({ - sql: `DELETE FROM ${messagesTable} WHERE conversation_id = ? AND user_id = ?`, - args: [conversationId, userId], - }); - await this.client.execute({ - sql: `DELETE FROM ${stepsTable} WHERE conversation_id = ? AND user_id = ?`, - args: [conversationId, userId], - }); - } else { - // Clear all messages for the user - await this.client.execute({ - sql: `DELETE FROM ${messagesTable} - WHERE conversation_id IN ( - SELECT id FROM ${conversationsTable} WHERE user_id = ? - )`, - args: [userId], - }); - await this.client.execute({ - sql: `DELETE FROM ${stepsTable} - WHERE conversation_id IN ( - SELECT id FROM ${conversationsTable} WHERE user_id = ? 
- )`, - args: [userId], - }); - } - } - - // ============================================================================ - // Conversation Operations - // ============================================================================ - - /** - * Create a new conversation - */ - async createConversation(input: CreateConversationInput): Promise { - await this.initialize(); - - const conversationsTable = `${this.tablePrefix}_conversations`; - - // Check if conversation already exists - const existing = await this.getConversation(input.id); - if (existing) { - throw new ConversationAlreadyExistsError(input.id); - } - - const now = new Date().toISOString(); - - await this.executeWithRetry(async () => { - await this.client.execute({ - sql: `INSERT INTO ${conversationsTable} (id, resource_id, user_id, title, metadata, created_at, updated_at) - VALUES (?, ?, ?, ?, ?, ?, ?)`, - args: [ - input.id, - input.resourceId, - input.userId, - input.title, - safeStringify(input.metadata || {}), - now, - now, - ], - }); - }, "create conversation"); - - return { - id: input.id, - userId: input.userId, - resourceId: input.resourceId, - title: input.title, - metadata: input.metadata || {}, - createdAt: now, - updatedAt: now, - }; - } - - /** - * Get a conversation by ID - */ - async getConversation(id: string): Promise { - await this.initialize(); - - const conversationsTable = `${this.tablePrefix}_conversations`; - - const result = await this.client.execute({ - sql: `SELECT * FROM ${conversationsTable} WHERE id = ?`, - args: [id], - }); - - if (result.rows.length === 0) { - return null; - } - - const row = result.rows[0]; - return { - id: row.id as string, - userId: row.user_id as string, - resourceId: row.resource_id as string, - title: row.title as string, - metadata: row.metadata ? JSON.parse(row.metadata as string) : {}, - createdAt: row.created_at as string, - updatedAt: row.updated_at as string, - }; - } - - /** - * Get conversations by resource ID - */ - async getConversations(resourceId: string): Promise { - await this.initialize(); - - const conversationsTable = `${this.tablePrefix}_conversations`; - - const result = await this.client.execute({ - sql: `SELECT * FROM ${conversationsTable} WHERE resource_id = ? ORDER BY updated_at DESC`, - args: [resourceId], - }); - - return result.rows.map((row) => ({ - id: row.id as string, - userId: row.user_id as string, - resourceId: row.resource_id as string, - title: row.title as string, - metadata: row.metadata ? 
JSON.parse(row.metadata as string) : {}, - createdAt: row.created_at as string, - updatedAt: row.updated_at as string, - })); - } - - /** - * Get conversations by user ID - */ - async getConversationsByUserId( - userId: string, - options?: Omit, - ): Promise { - return this.queryConversations({ ...options, userId }); - } - - /** - * Query conversations with filters - */ - async queryConversations(options: ConversationQueryOptions): Promise { - await this.initialize(); - - const conversationsTable = `${this.tablePrefix}_conversations`; - let sql = `SELECT * FROM ${conversationsTable} WHERE 1=1`; - const args: any[] = []; - - // Add filters - if (options.userId) { - sql += " AND user_id = ?"; - args.push(options.userId); - } - - if (options.resourceId) { - sql += " AND resource_id = ?"; - args.push(options.resourceId); - } - - // Add ordering - const orderBy = options.orderBy || "updated_at"; - const orderDirection = options.orderDirection || "DESC"; - sql += ` ORDER BY ${orderBy} ${orderDirection}`; - - // Add pagination - if (options.limit) { - sql += " LIMIT ?"; - args.push(options.limit); - } - - if (options.offset) { - sql += " OFFSET ?"; - args.push(options.offset); - } - - const result = await this.client.execute({ sql, args }); - - return result.rows.map((row) => ({ - id: row.id as string, - userId: row.user_id as string, - resourceId: row.resource_id as string, - title: row.title as string, - metadata: row.metadata ? JSON.parse(row.metadata as string) : {}, - createdAt: row.created_at as string, - updatedAt: row.updated_at as string, - })); - } - - /** - * Update a conversation - */ - async updateConversation( - id: string, - updates: Partial>, - ): Promise { - await this.initialize(); - - const conversationsTable = `${this.tablePrefix}_conversations`; - const conversation = await this.getConversation(id); - if (!conversation) { - throw new ConversationNotFoundError(id); - } - - const now = new Date().toISOString(); - const fieldsToUpdate: string[] = ["updated_at = ?"]; - const args: any[] = [now]; - - if (updates.title !== undefined) { - fieldsToUpdate.push("title = ?"); - args.push(updates.title); - } - - if (updates.resourceId !== undefined) { - fieldsToUpdate.push("resource_id = ?"); - args.push(updates.resourceId); - } - - if (updates.metadata !== undefined) { - fieldsToUpdate.push("metadata = ?"); - args.push(safeStringify(updates.metadata)); - } - - args.push(id); // WHERE clause - - await this.client.execute({ - sql: `UPDATE ${conversationsTable} SET ${fieldsToUpdate.join(", ")} WHERE id = ?`, - args, - }); - - const updated = await this.getConversation(id); - if (!updated) { - throw new Error(`Conversation not found after update: ${id}`); - } - return updated; - } - - /** - * Delete a conversation - */ - async deleteConversation(id: string): Promise { - await this.initialize(); - - const conversationsTable = `${this.tablePrefix}_conversations`; - - await this.client.execute({ - sql: `DELETE FROM ${conversationsTable} WHERE id = ?`, - args: [id], - }); - } - - // ============================================================================ - // Working Memory Operations - // ============================================================================ - - /** - * Get working memory - */ - async getWorkingMemory(params: { - conversationId?: string; - userId?: string; - scope: WorkingMemoryScope; - }): Promise { - await this.initialize(); - - if (params.scope === "conversation" && params.conversationId) { - const conversation = await this.getConversation(params.conversationId); 
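// Usage sketch (caller-side, illustrative only; the adapter instance, conversation id,
// and content below are placeholders, not part of this patch): conversation-scoped
// working memory round-trips through the conversation's metadata JSON, as the branch
// here shows.
//
//   await adapter.setWorkingMemory({
//     scope: "conversation",
//     conversationId: "conv-1",
//     content: "User prefers metric units.",
//   });
//   const note = await adapter.getWorkingMemory({
//     scope: "conversation",
//     conversationId: "conv-1",
//   }); // -> "User prefers metric units.", read back from conversation.metadata.workingMemory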
- return (conversation?.metadata?.workingMemory as string) || null; - } - - if (params.scope === "user" && params.userId) { - const usersTable = `${this.tablePrefix}_users`; - const result = await this.client.execute({ - sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`, - args: [params.userId], - }); - - if (result.rows.length > 0) { - const metadata = result.rows[0].metadata - ? JSON.parse(result.rows[0].metadata as string) - : {}; - return metadata.workingMemory || null; - } - } - - return null; - } - - /** - * Set working memory - */ - async setWorkingMemory(params: { - conversationId?: string; - userId?: string; - content: string; - scope: WorkingMemoryScope; - }): Promise { - await this.initialize(); - - if (params.scope === "conversation" && params.conversationId) { - const conversation = await this.getConversation(params.conversationId); - if (!conversation) { - throw new ConversationNotFoundError(params.conversationId); - } - - const metadata = conversation.metadata || {}; - metadata.workingMemory = params.content; - - await this.updateConversation(params.conversationId, { metadata }); - } - - if (params.scope === "user" && params.userId) { - const usersTable = `${this.tablePrefix}_users`; - const now = new Date().toISOString(); - - // Check if user exists - const result = await this.client.execute({ - sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`, - args: [params.userId], - }); - - if (result.rows.length > 0) { - // User exists, update metadata - const metadata = result.rows[0].metadata - ? JSON.parse(result.rows[0].metadata as string) - : {}; - metadata.workingMemory = params.content; - - await this.client.execute({ - sql: `UPDATE ${usersTable} SET metadata = ?, updated_at = ? WHERE id = ?`, - args: [safeStringify(metadata), now, params.userId], - }); - } else { - // User doesn't exist, create new record - await this.client.execute({ - sql: `INSERT INTO ${usersTable} (id, metadata, created_at, updated_at) VALUES (?, ?, ?, ?)`, - args: [params.userId, safeStringify({ workingMemory: params.content }), now, now], - }); - } - } - } - - /** - * Delete working memory - */ - async deleteWorkingMemory(params: { - conversationId?: string; - userId?: string; - scope: WorkingMemoryScope; - }): Promise { - await this.initialize(); - - if (params.scope === "conversation" && params.conversationId) { - const conversation = await this.getConversation(params.conversationId); - if (conversation?.metadata?.workingMemory) { - const metadata = { ...conversation.metadata }; - // biome-ignore lint/performance/noDelete: - delete metadata.workingMemory; - await this.updateConversation(params.conversationId, { metadata }); - } - } - - if (params.scope === "user" && params.userId) { - const usersTable = `${this.tablePrefix}_users`; - const result = await this.client.execute({ - sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`, - args: [params.userId], - }); - - if (result.rows.length > 0 && result.rows[0].metadata) { - const metadata = JSON.parse(result.rows[0].metadata as string); - if (metadata.workingMemory) { - // biome-ignore lint/performance/noDelete: - delete metadata.workingMemory; - await this.client.execute({ - sql: `UPDATE ${usersTable} SET metadata = ?, updated_at = ? 
WHERE id = ?`, - args: [safeStringify(metadata), new Date().toISOString(), params.userId], - }); - } - } - } - } - - // ============================================================================ - // Workflow State Operations - // ============================================================================ - - /** - * Get workflow state by execution ID - */ - async getWorkflowState(executionId: string): Promise { - await this.initialize(); - - const workflowStatesTable = `${this.tablePrefix}_workflow_states`; - const result = await this.client.execute({ - sql: `SELECT * FROM ${workflowStatesTable} WHERE id = ?`, - args: [executionId], - }); - - if (result.rows.length === 0) { - return null; - } - - const row = result.rows[0]; - return { - id: row.id as string, - workflowId: row.workflow_id as string, - workflowName: row.workflow_name as string, - status: row.status as "running" | "suspended" | "completed" | "error", - suspension: row.suspension ? JSON.parse(row.suspension as string) : undefined, - events: row.events ? JSON.parse(row.events as string) : undefined, - output: row.output ? JSON.parse(row.output as string) : undefined, - cancellation: row.cancellation ? JSON.parse(row.cancellation as string) : undefined, - userId: row.user_id as string | undefined, - conversationId: row.conversation_id as string | undefined, - metadata: row.metadata ? JSON.parse(row.metadata as string) : undefined, - createdAt: new Date(row.created_at as string), - updatedAt: new Date(row.updated_at as string), - }; - } - - /** - * Query workflow states with optional filters - */ - async queryWorkflowRuns(query: { - workflowId?: string; - status?: WorkflowStateEntry["status"]; - from?: Date; - to?: Date; - limit?: number; - offset?: number; - }): Promise { - await this.initialize(); - - const workflowStatesTable = `${this.tablePrefix}_workflow_states`; - const conditions: string[] = []; - const args: any[] = []; - - if (query.workflowId) { - conditions.push("workflow_id = ?"); - args.push(query.workflowId); - } - - if (query.status) { - conditions.push("status = ?"); - args.push(query.status); - } - - if (query.from) { - conditions.push("created_at >= ?"); - args.push(query.from.toISOString()); - } - - if (query.to) { - conditions.push("created_at <= ?"); - args.push(query.to.toISOString()); - } - - let sql = `SELECT * FROM ${workflowStatesTable}`; - if (conditions.length > 0) { - sql += ` WHERE ${conditions.join(" AND ")}`; - } - sql += " ORDER BY created_at DESC"; - - if (query.limit !== undefined) { - sql += " LIMIT ?"; - args.push(query.limit); - } - - if (query.offset !== undefined) { - sql += " OFFSET ?"; - args.push(query.offset); - } - - const result = await this.client.execute({ - sql, - args, - }); - - return result.rows.map((row) => ({ - id: row.id as string, - workflowId: row.workflow_id as string, - workflowName: row.workflow_name as string, - status: row.status as WorkflowStateEntry["status"], - suspension: row.suspension ? JSON.parse(row.suspension as string) : undefined, - events: row.events ? JSON.parse(row.events as string) : undefined, - output: row.output ? JSON.parse(row.output as string) : undefined, - cancellation: row.cancellation ? JSON.parse(row.cancellation as string) : undefined, - userId: row.user_id as string | undefined, - conversationId: row.conversation_id as string | undefined, - metadata: row.metadata ? 
JSON.parse(row.metadata as string) : undefined, - createdAt: new Date(row.created_at as string), - updatedAt: new Date(row.updated_at as string), - })); - } - - /** - * Set workflow state - */ - async setWorkflowState(executionId: string, state: WorkflowStateEntry): Promise { - await this.initialize(); - - const workflowStatesTable = `${this.tablePrefix}_workflow_states`; - await this.client.execute({ - sql: `INSERT OR REPLACE INTO ${workflowStatesTable} - (id, workflow_id, workflow_name, status, suspension, events, output, cancellation, user_id, conversation_id, metadata, created_at, updated_at) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, - args: [ - executionId, - state.workflowId, - state.workflowName, - state.status, - state.suspension ? safeStringify(state.suspension) : null, - state.events ? safeStringify(state.events) : null, - state.output ? safeStringify(state.output) : null, - state.cancellation ? safeStringify(state.cancellation) : null, - state.userId || null, - state.conversationId || null, - state.metadata ? safeStringify(state.metadata) : null, - state.createdAt.toISOString(), - state.updatedAt.toISOString(), - ], - }); - } - - /** - * Update workflow state - */ - async updateWorkflowState( - executionId: string, - updates: Partial, - ): Promise { - await this.initialize(); - - const existing = await this.getWorkflowState(executionId); - if (!existing) { - throw new Error(`Workflow state ${executionId} not found`); - } - - const updated: WorkflowStateEntry = { - ...existing, - ...updates, - updatedAt: new Date(), - }; - - await this.setWorkflowState(executionId, updated); - } - - /** - * Get suspended workflow states for a workflow - */ - async getSuspendedWorkflowStates(workflowId: string): Promise { - await this.initialize(); - - const workflowStatesTable = `${this.tablePrefix}_workflow_states`; - const result = await this.client.execute({ - sql: `SELECT * FROM ${workflowStatesTable} WHERE workflow_id = ? AND status = 'suspended' ORDER BY created_at DESC`, - args: [workflowId], - }); - - return result.rows.map((row) => ({ - id: row.id as string, - workflowId: row.workflow_id as string, - workflowName: row.workflow_name as string, - status: "suspended" as const, - suspension: row.suspension ? JSON.parse(row.suspension as string) : undefined, - events: row.events ? JSON.parse(row.events as string) : undefined, - output: row.output ? JSON.parse(row.output as string) : undefined, - cancellation: row.cancellation ? JSON.parse(row.cancellation as string) : undefined, - userId: row.user_id as string | undefined, - conversationId: row.conversation_id as string | undefined, - metadata: row.metadata ? 
JSON.parse(row.metadata as string) : undefined,
-      createdAt: new Date(row.created_at as string),
-      updatedAt: new Date(row.updated_at as string),
-    }));
-  }
-
-  /**
-   * Close database connection
-   */
-  async close(): Promise<void> {
-    // LibSQL client doesn't have explicit close method
-    // Connection is managed automatically
-    this.logger.debug("Closing LibSQL Memory adapter");
+    super(client, url, options, logger);
   }
 }
diff --git a/packages/libsql/src/observability-adapter-edge.ts b/packages/libsql/src/observability-adapter-edge.ts
new file mode 100644
index 000000000..28d8d0c00
--- /dev/null
+++ b/packages/libsql/src/observability-adapter-edge.ts
@@ -0,0 +1,74 @@
+/**
+ * LibSQL Observability Adapter - Edge/Cloudflare Workers
+ * Provides persistent storage for OpenTelemetry spans using remote Turso database
+ * Uses @libsql/client/web for edge runtime compatibility
+ */
+
+import { createClient } from "@libsql/client/web";
+import { createPinoLogger } from "@voltagent/logger";
+import type { Logger } from "@voltagent/logger";
+import { LibSQLObservabilityCore, type LibSQLObservabilityCoreOptions } from "./observability-core";
+
+/**
+ * Options for configuring the LibSQLObservabilityAdapterEdge
+ */
+export interface LibSQLObservabilityEdgeOptions extends LibSQLObservabilityCoreOptions {
+  /**
+   * LibSQL connection URL - must be a remote Turso URL (libsql://)
+   * File-based URLs are not supported in edge environments
+   */
+  url: string;
+
+  /**
+   * Auth token for LibSQL/Turso (required for remote connections)
+   */
+  authToken: string;
+
+  /**
+   * Optional logger instance
+   */
+  logger?: Logger;
+}
+
+/**
+ * LibSQL Observability Adapter - Edge Compatible
+ * Provides observability storage using remote Turso database
+ * Only supports remote Turso databases (libsql://)
+ */
+export class LibSQLObservabilityAdapterEdge extends LibSQLObservabilityCore {
+  constructor(options: LibSQLObservabilityEdgeOptions) {
+    // Validate URL - edge only supports remote URLs
+    if (!options.url) {
+      throw new Error("LibSQLObservabilityAdapterEdge requires a url option");
+    }
+
+    if (
+      options.url.startsWith("file:") ||
+      options.url === ":memory:" ||
+      !options.url.startsWith("libsql://")
+    ) {
+      throw new Error(
+        "LibSQLObservabilityAdapterEdge only supports remote Turso URLs (libsql://). " +
+          "File-based databases are not supported in edge environments. " +
+          "Use LibSQLObservabilityAdapter from '@voltagent/libsql' for Node.js environments.",
+      );
+    }
+
+    if (!options.authToken) {
+      throw new Error(
+        "LibSQLObservabilityAdapterEdge requires an authToken for remote connections",
+      );
+    }
+
+    // Initialize the logger
+    const logger = options.logger || createPinoLogger({ name: "libsql-observability-edge" });
+
+    // Initialize the LibSQL client using web-compatible import
+    const client = createClient({
+      url: options.url,
+      authToken: options.authToken,
+    });
+
+    super(client, options, logger);
+  }
+}
diff --git a/packages/libsql/src/observability-adapter.ts b/packages/libsql/src/observability-adapter.ts
index 277880272..e385ddcbe 100644
--- a/packages/libsql/src/observability-adapter.ts
+++ b/packages/libsql/src/observability-adapter.ts
@@ -1,33 +1,23 @@
 /**
- * LibSQL Observability Adapter
+ * LibSQL Observability Adapter - Node.js
  * Provides persistent storage for OpenTelemetry spans using LibSQL/Turso database
- * Part of the OpenTelemetry observability migration (Phase 3)
  */
 
 import { existsSync, mkdirSync } from "node:fs";
 import { dirname } from "node:path";
-import type { Client } from "@libsql/client";
 import { createClient } from "@libsql/client";
-import type {
-  LogFilter,
-  ObservabilityLogRecord,
-  ObservabilitySpan,
-  ObservabilityStorageAdapter,
-} from "@voltagent/core";
-import { safeStringify } from "@voltagent/internal/utils";
-import { type Logger, createPinoLogger } from "@voltagent/logger";
+import { createPinoLogger } from "@voltagent/logger";
+import type { Logger } from "@voltagent/logger";
+import { LibSQLObservabilityCore, type LibSQLObservabilityCoreOptions } from "./observability-core";
 
 /**
  * Options for configuring the LibSQLObservabilityAdapter
  */
-export interface LibSQLObservabilityOptions {
+export interface LibSQLObservabilityOptions extends LibSQLObservabilityCoreOptions {
   /**
    * LibSQL connection URL
    * Can be either a remote Turso URL or a local file path
    * @default "file:./.voltagent/observability.db"
-   * @example "libsql://your-database.turso.io" for remote Turso
-   * @example "file:observability.db" for local SQLite in current directory
-   * @example "file:.voltagent/observability.db" for local SQLite in .voltagent folder
    */
   url?: string;
 
@@ -37,999 +27,46 @@ export interface LibSQLObservabilityOptions extends LibSQLObservabilityCoreOptions {
    */
   authToken?: string;
 
-  /**
-   * Prefix for table names
-   * @default "observability"
-   */
-  tablePrefix?: string;
-
-  /**
-   * Whether to enable debug logging
-   * @default false
-   */
-  debug?: boolean;
-
   /**
    * Optional logger instance
    */
   logger?: Logger;
-
-  /**
-   * Maximum number of spans to return in a single query
-   * @default 1000
-   */
-  maxSpansPerQuery?: number;
 }
 
 /**
- * LibSQL Observability Adapter
+ * LibSQL Observability Adapter - Node.js
  * Provides observability storage using LibSQL/Turso database
- * Implements the ObservabilityStorageAdapter interface for OpenTelemetry spans
+ * Supports both local SQLite files and remote Turso databases
  */
-export class LibSQLObservabilityAdapter implements ObservabilityStorageAdapter {
-  private client: Client;
-  private tablePrefix: string;
-  private debug: boolean;
-  private logger: Logger;
-  private initialized: Promise<void>;
-  private maxSpansPerQuery: number;
-
+export class LibSQLObservabilityAdapter extends LibSQLObservabilityCore {
   constructor(options: LibSQLObservabilityOptions = {}) {
-    // Initialize the logger
-    this.logger = options.logger || createPinoLogger({ name: "libsql-observability" });
-
-    this.tablePrefix = options.tablePrefix ||
"observability"; - this.debug = options.debug || false; - this.maxSpansPerQuery = options.maxSpansPerQuery || 1000; const url = options.url || "file:./.voltagent/observability.db"; + // Initialize the logger + const logger = options.logger || createPinoLogger({ name: "libsql-observability" }); + // Ensure parent directory exists for file-based databases if (url.startsWith("file:") && !url.includes(":memory:")) { - const filePath = url.substring(5); // Remove 'file:' prefix + const filePath = url.substring(5); const dir = dirname(filePath); if (dir && dir !== "." && !existsSync(dir)) { try { mkdirSync(dir, { recursive: true }); - this.debugLog("Created directory for database", { dir }); + if (options.debug) { + logger.debug("Created directory for database", { dir }); + } } catch (error) { - this.logger.warn("Failed to create directory for database", { dir, error }); + logger.warn("Failed to create directory for database", { dir, error }); } } } // Initialize the LibSQL client - this.client = createClient({ + const client = createClient({ url, authToken: options.authToken, }); - this.debugLog("LibSQL observability adapter initialized with options", { - url, - tablePrefix: this.tablePrefix, - debug: this.debug, - maxSpansPerQuery: this.maxSpansPerQuery, - }); - - // Initialize the database tables - this.initialized = this.initializeDatabase(); - } - - /** - * Log a debug message if debug is enabled - */ - private debugLog(message: string, data?: unknown): void { - if (this.debug) { - this.logger.debug(`${message}`, data || ""); - } - } - - /** - * Initialize database tables for observability - */ - private async initializeDatabase(): Promise { - try { - // Create main spans table with entity columns - await this.client.execute(` - CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_spans ( - span_id TEXT PRIMARY KEY, - trace_id TEXT NOT NULL, - parent_span_id TEXT, - entity_id TEXT, - entity_type TEXT, - name TEXT NOT NULL, - kind INTEGER DEFAULT 0, - start_time TEXT NOT NULL, - end_time TEXT, - duration REAL, - status_code INTEGER DEFAULT 0, - status_message TEXT, - attributes TEXT, - events TEXT, - links TEXT, - resource TEXT, - instrumentation_scope TEXT, - created_at TEXT DEFAULT CURRENT_TIMESTAMP, - updated_at TEXT DEFAULT CURRENT_TIMESTAMP - ) - `); - - // Create indexes for efficient queries - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_trace_id - ON ${this.tablePrefix}_spans(trace_id) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_parent_span_id - ON ${this.tablePrefix}_spans(parent_span_id) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_start_time - ON ${this.tablePrefix}_spans(start_time) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_name - ON ${this.tablePrefix}_spans(name) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_entity_id - ON ${this.tablePrefix}_spans(entity_id) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_entity_type - ON ${this.tablePrefix}_spans(entity_type) - `); - - // Create trace metadata table for fast trace listing with entity columns - await this.client.execute(` - CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_traces ( - trace_id TEXT PRIMARY KEY, - root_span_id TEXT, - entity_id TEXT, - entity_type TEXT, - start_time TEXT NOT NULL, - end_time TEXT, - span_count INTEGER 
DEFAULT 1, - created_at TEXT DEFAULT CURRENT_TIMESTAMP, - updated_at TEXT DEFAULT CURRENT_TIMESTAMP - ) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_start_time - ON ${this.tablePrefix}_traces(start_time DESC) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_entity_id - ON ${this.tablePrefix}_traces(entity_id) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_entity_type - ON ${this.tablePrefix}_traces(entity_type) - `); - - // Create logs table - await this.client.execute(` - CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_logs ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - timestamp TEXT NOT NULL, - trace_id TEXT, - span_id TEXT, - trace_flags INTEGER, - severity_number INTEGER, - severity_text TEXT, - body TEXT NOT NULL, - attributes TEXT, - resource TEXT, - instrumentation_scope TEXT, - created_at TEXT DEFAULT CURRENT_TIMESTAMP - ) - `); - - // Create indexes for logs - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_trace_id - ON ${this.tablePrefix}_logs(trace_id) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_span_id - ON ${this.tablePrefix}_logs(span_id) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_timestamp - ON ${this.tablePrefix}_logs(timestamp DESC) - `); - - await this.client.execute(` - CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_severity - ON ${this.tablePrefix}_logs(severity_number) - `); - - this.debugLog("Database tables initialized successfully"); - } catch (error) { - this.logger.error("Failed to initialize database tables", { error }); - throw error; - } - } - - /** - * Ensure database is initialized before operations - */ - private async ensureInitialized(): Promise { - await this.initialized; - } - - /** - * Add a span to the database - */ - async addSpan(span: ObservabilitySpan): Promise { - await this.ensureInitialized(); - - try { - // Extract entity information from attributes - const entityId = (span.attributes?.["entity.id"] as string) || null; - const entityType = (span.attributes?.["entity.type"] as string) || null; - - // Start a transaction for consistency - await this.client.batch([ - // Insert the span with entity columns - { - sql: ` - INSERT INTO ${this.tablePrefix}_spans ( - span_id, trace_id, parent_span_id, entity_id, entity_type, name, kind, - start_time, end_time, duration, - status_code, status_message, - attributes, events, links, - resource, instrumentation_scope - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - `, - args: [ - span.spanId, - span.traceId, - span.parentSpanId || null, - entityId, - entityType, - span.name, - span.kind, - span.startTime, - span.endTime || null, - span.duration || null, - span.status.code, - span.status.message || null, - safeStringify(span.attributes), - safeStringify(span.events), - span.links ? safeStringify(span.links) : null, - span.resource ? safeStringify(span.resource) : null, - span.instrumentationScope ? 
safeStringify(span.instrumentationScope) : null, - ], - }, - // Update or insert trace metadata with entity columns - { - sql: ` - INSERT INTO ${this.tablePrefix}_traces ( - trace_id, root_span_id, entity_id, entity_type, start_time, end_time, span_count - ) VALUES (?, ?, ?, ?, ?, ?, 1) - ON CONFLICT(trace_id) DO UPDATE SET - span_count = span_count + 1, - entity_id = COALESCE(excluded.entity_id, entity_id), - entity_type = COALESCE(excluded.entity_type, entity_type), - start_time = MIN(start_time, excluded.start_time), - end_time = MAX(COALESCE(end_time, excluded.end_time), excluded.end_time), - updated_at = CURRENT_TIMESTAMP - `, - args: [ - span.traceId, - span.parentSpanId ? null : span.spanId, // Root span if no parent - entityId, - entityType, - span.startTime, - span.endTime || null, - ], - }, - ]); - - this.debugLog("Span added successfully", { - spanId: span.spanId, - traceId: span.traceId, - }); - } catch (error) { - this.logger.error("Failed to add span", { error, span }); - throw error; - } - } - - /** - * Update an existing span - */ - async updateSpan(spanId: string, updates: Partial): Promise { - await this.ensureInitialized(); - - try { - const setClauses: string[] = []; - const args: any[] = []; - - // Build dynamic SET clause based on provided updates - if (updates.endTime !== undefined) { - setClauses.push("end_time = ?"); - args.push(updates.endTime); - } - if (updates.duration !== undefined) { - setClauses.push("duration = ?"); - args.push(updates.duration); - } - if (updates.status !== undefined) { - setClauses.push("status_code = ?, status_message = ?"); - args.push(updates.status.code, updates.status.message || null); - } - if (updates.attributes !== undefined) { - setClauses.push("attributes = ?"); - args.push(safeStringify(updates.attributes)); - } - if (updates.events !== undefined) { - setClauses.push("events = ?"); - args.push(safeStringify(updates.events)); - } - if (updates.links !== undefined) { - setClauses.push("links = ?"); - args.push(safeStringify(updates.links)); - } - - if (setClauses.length === 0) { - return; // Nothing to update - } - - setClauses.push("updated_at = CURRENT_TIMESTAMP"); - args.push(spanId); - - await this.client.execute({ - sql: ` - UPDATE ${this.tablePrefix}_spans - SET ${setClauses.join(", ")} - WHERE span_id = ? - `, - args, - }); - - // If endTime was updated, also update trace metadata - if (updates.endTime) { - const span = await this.getSpan(spanId); - if (span) { - await this.client.execute({ - sql: ` - UPDATE ${this.tablePrefix}_traces - SET end_time = MAX(COALESCE(end_time, ?), ?), - updated_at = CURRENT_TIMESTAMP - WHERE trace_id = ? - `, - args: [updates.endTime, updates.endTime, span.traceId], - }); - } - } - - this.debugLog("Span updated successfully", { spanId, updates }); - } catch (error) { - this.logger.error("Failed to update span", { error, spanId, updates }); - throw error; - } - } - - /** - * Get a span by ID - */ - async getSpan(spanId: string): Promise { - await this.ensureInitialized(); - - try { - const result = await this.client.execute({ - sql: ` - SELECT * FROM ${this.tablePrefix}_spans - WHERE span_id = ? 
- `, - args: [spanId], - }); - - if (result.rows.length === 0) { - return null; - } - - const row = result.rows[0]; - return this.rowToSpan(row); - } catch (error) { - this.logger.error("Failed to get span", { error, spanId }); - throw error; - } - } - - /** - * Get all spans in a trace - */ - async getTrace(traceId: string): Promise { - await this.ensureInitialized(); - - try { - const result = await this.client.execute({ - sql: ` - SELECT * FROM ${this.tablePrefix}_spans - WHERE trace_id = ? - ORDER BY start_time ASC - LIMIT ? - `, - args: [traceId, this.maxSpansPerQuery], - }); - - return result.rows.map((row) => this.rowToSpan(row)); - } catch (error) { - this.logger.error("Failed to get trace", { error, traceId }); - throw error; - } - } - - /** - * List all traces with optional entity filter - */ - async listTraces( - limit = 100, - offset = 0, - filter?: { - entityId?: string; - entityType?: "agent" | "workflow"; - }, - ): Promise { - await this.ensureInitialized(); - - try { - let sql: string; - let args: any[] = []; - const conditions: string[] = []; - - if (filter?.entityId) { - conditions.push("entity_id = ?"); - args.push(filter.entityId); - } - - if (filter?.entityType) { - conditions.push("entity_type = ?"); - args.push(filter.entityType); - } - - if (conditions.length > 0) { - // Filter by entity - sql = ` - SELECT trace_id FROM ${this.tablePrefix}_traces - WHERE ${conditions.join(" AND ")} - ORDER BY start_time DESC - LIMIT ? OFFSET ? - `; - args.push(limit, offset); - } else { - // Get all traces - sql = ` - SELECT trace_id FROM ${this.tablePrefix}_traces - ORDER BY start_time DESC - LIMIT ? OFFSET ? - `; - args = [limit, offset]; - } - - const result = await this.client.execute({ sql, args }); - return result.rows.map((row) => row.trace_id as string); - } catch (error) { - this.logger.error("Failed to list traces", { error, limit, offset, filter }); - throw error; - } - } - - /** - * Delete old spans - */ - async deleteOldSpans(beforeTimestamp: number): Promise { - await this.ensureInitialized(); - - try { - const beforeDate = new Date(beforeTimestamp).toISOString(); - - // Get affected trace IDs before deletion - const tracesResult = await this.client.execute({ - sql: ` - SELECT DISTINCT trace_id FROM ${this.tablePrefix}_spans - WHERE start_time < ? - `, - args: [beforeDate], - }); - - const affectedTraceIds = tracesResult.rows.map((row) => row.trace_id as string); - - // Delete old spans - const deleteResult = await this.client.execute({ - sql: ` - DELETE FROM ${this.tablePrefix}_spans - WHERE start_time < ? - `, - args: [beforeDate], - }); - - // Clean up trace metadata - if (affectedTraceIds.length > 0) { - // Update span counts for affected traces - for (const traceId of affectedTraceIds) { - const countResult = await this.client.execute({ - sql: ` - SELECT COUNT(*) as count FROM ${this.tablePrefix}_spans - WHERE trace_id = ? - `, - args: [traceId], - }); - - const count = countResult.rows[0].count as number; - if (count === 0) { - // Delete trace metadata if no spans remain - await this.client.execute({ - sql: ` - DELETE FROM ${this.tablePrefix}_traces - WHERE trace_id = ? - `, - args: [traceId], - }); - } else { - // Update span count - await this.client.execute({ - sql: ` - UPDATE ${this.tablePrefix}_traces - SET span_count = ?, - updated_at = CURRENT_TIMESTAMP - WHERE trace_id = ? 
- `, - args: [count, traceId], - }); - } - } - } - - const deletedCount = deleteResult.rowsAffected || 0; - this.debugLog("Old spans deleted", { deletedCount, beforeDate }); - return deletedCount; - } catch (error) { - this.logger.error("Failed to delete old spans", { error, beforeTimestamp }); - throw error; - } - } - - /** - * Clear all spans, traces, and logs - */ - async clear(): Promise { - await this.ensureInitialized(); - - try { - await this.client.batch([ - { sql: `DELETE FROM ${this.tablePrefix}_spans`, args: [] }, - { sql: `DELETE FROM ${this.tablePrefix}_traces`, args: [] }, - { sql: `DELETE FROM ${this.tablePrefix}_logs`, args: [] }, - ]); - - this.debugLog("All spans, traces, and logs cleared"); - } catch (error) { - this.logger.error("Failed to clear data", { error }); - throw error; - } - } - - /** - * Convert a database row to an ObservabilitySpan - */ - private rowToSpan(row: any): ObservabilitySpan { - const span: ObservabilitySpan = { - traceId: row.trace_id as string, - spanId: row.span_id as string, - name: row.name as string, - kind: row.kind as number, - startTime: row.start_time as string, - status: { - code: row.status_code as number, - }, - attributes: row.attributes ? JSON.parse(row.attributes as string) : {}, - events: row.events ? JSON.parse(row.events as string) : [], - }; - - // Add optional fields only if they have values (not null) - if (row.parent_span_id !== null) { - span.parentSpanId = row.parent_span_id as string; - } - if (row.end_time !== null) { - span.endTime = row.end_time as string; - } - if (row.duration !== null) { - span.duration = row.duration as number; - } - if (row.status_message !== null) { - span.status.message = row.status_message as string; - } - if (row.links && row.links !== "null") { - const links = JSON.parse(row.links as string); - if (links && links.length > 0) { - span.links = links; - } - } - if (row.resource && row.resource !== "null") { - const resource = JSON.parse(row.resource as string); - if (resource && Object.keys(resource).length > 0) { - span.resource = resource; - } - } - if (row.instrumentation_scope && row.instrumentation_scope !== "null") { - const scope = JSON.parse(row.instrumentation_scope as string); - if (scope) { - span.instrumentationScope = scope; - } - } - - return span; - } - - /** - * Get statistics about stored spans - */ - async getStats(): Promise<{ - spanCount: number; - traceCount: number; - oldestSpan?: Date; - newestSpan?: Date; - }> { - await this.ensureInitialized(); - - try { - const [spanCountResult, traceCountResult, timeRangeResult] = await Promise.all([ - this.client.execute(`SELECT COUNT(*) as count FROM ${this.tablePrefix}_spans`), - this.client.execute(`SELECT COUNT(*) as count FROM ${this.tablePrefix}_traces`), - this.client.execute(` - SELECT - MIN(start_time) as oldest, - MAX(start_time) as newest - FROM ${this.tablePrefix}_spans - `), - ]); - - const stats: any = { - spanCount: spanCountResult.rows[0].count as number, - traceCount: traceCountResult.rows[0].count as number, - }; - - if (timeRangeResult.rows[0].oldest) { - stats.oldestSpan = new Date(timeRangeResult.rows[0].oldest as string); - } - if (timeRangeResult.rows[0].newest) { - stats.newestSpan = new Date(timeRangeResult.rows[0].newest as string); - } - - return stats; - } catch (error) { - this.logger.error("Failed to get stats", { error }); - throw error; - } - } - - /** - * Save a log record to the database - */ - async saveLogRecord(logRecord: any): Promise { - await this.ensureInitialized(); - - try { - // Convert 
timestamp if it's an array (OpenTelemetry HrTime format) - let timestamp: string; - if (Array.isArray(logRecord.hrTime)) { - const timeMs = logRecord.hrTime[0] * 1000 + logRecord.hrTime[1] / 1000000; - timestamp = new Date(timeMs).toISOString(); - } else if (logRecord.timestamp) { - timestamp = - typeof logRecord.timestamp === "string" - ? logRecord.timestamp - : new Date(logRecord.timestamp).toISOString(); - } else { - timestamp = new Date().toISOString(); - } - - // Extract trace context - const spanContext = logRecord.spanContext || {}; - const traceId = spanContext.traceId || null; - const spanId = spanContext.spanId || null; - const traceFlags = spanContext.traceFlags ?? null; - - // Extract log data - const severityNumber = logRecord.severityNumber ?? null; - const severityText = logRecord.severityText || null; - const body = - typeof logRecord.body === "string" ? logRecord.body : safeStringify(logRecord.body); - const attributes = logRecord.attributes ? safeStringify(logRecord.attributes) : null; - const resource = logRecord.resource?.attributes - ? safeStringify(logRecord.resource.attributes) - : null; - const instrumentationScope = - logRecord.instrumentationLibrary || logRecord.instrumentationScope - ? safeStringify(logRecord.instrumentationLibrary || logRecord.instrumentationScope) - : null; - - await this.client.execute({ - sql: ` - INSERT INTO ${this.tablePrefix}_logs ( - timestamp, trace_id, span_id, trace_flags, - severity_number, severity_text, body, - attributes, resource, instrumentation_scope - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - `, - args: [ - timestamp, - traceId, - spanId, - traceFlags, - severityNumber, - severityText, - body, - attributes, - resource, - instrumentationScope, - ], - }); - - this.debugLog("Log record saved successfully", { - timestamp, - traceId, - spanId, - severityNumber, - }); - } catch (error) { - this.logger.error("Failed to save log record", { error, logRecord }); - throw error; - } - } - - /** - * Get logs by trace ID - */ - async getLogsByTraceId(traceId: string): Promise { - await this.ensureInitialized(); - - try { - const result = await this.client.execute({ - sql: ` - SELECT * FROM ${this.tablePrefix}_logs - WHERE trace_id = ? - ORDER BY timestamp DESC - LIMIT ? - `, - args: [traceId, this.maxSpansPerQuery], - }); - - return result.rows.map((row) => this.rowToLogRecord(row)); - } catch (error) { - this.logger.error("Failed to get logs by trace ID", { error, traceId }); - throw error; - } - } - - /** - * Get logs by span ID - */ - async getLogsBySpanId(spanId: string): Promise { - await this.ensureInitialized(); - - try { - const result = await this.client.execute({ - sql: ` - SELECT * FROM ${this.tablePrefix}_logs - WHERE span_id = ? - ORDER BY timestamp DESC - LIMIT ? 
- `, - args: [spanId, this.maxSpansPerQuery], - }); - - return result.rows.map((row) => this.rowToLogRecord(row)); - } catch (error) { - this.logger.error("Failed to get logs by span ID", { error, spanId }); - throw error; - } - } - - /** - * Query logs with flexible filtering - */ - async queryLogs(filter: LogFilter): Promise { - await this.ensureInitialized(); - - try { - const whereClauses: string[] = []; - const args: any[] = []; - - if (filter.traceId) { - whereClauses.push("trace_id = ?"); - args.push(filter.traceId); - } - if (filter.spanId) { - whereClauses.push("span_id = ?"); - args.push(filter.spanId); - } - if (filter.severityNumber !== undefined) { - whereClauses.push("severity_number >= ?"); - args.push(filter.severityNumber); - } - if (filter.severityText) { - whereClauses.push("severity_text = ?"); - args.push(filter.severityText); - } - if (filter.instrumentationScope) { - whereClauses.push("instrumentation_scope LIKE ?"); - args.push(`%${filter.instrumentationScope}%`); - } - if (filter.startTimeMin !== undefined) { - const minTime = new Date(filter.startTimeMin).toISOString(); - whereClauses.push("timestamp >= ?"); - args.push(minTime); - } - if (filter.startTimeMax !== undefined) { - const maxTime = new Date(filter.startTimeMax).toISOString(); - whereClauses.push("timestamp <= ?"); - args.push(maxTime); - } - if (filter.bodyContains) { - whereClauses.push("body LIKE ?"); - args.push(`%${filter.bodyContains}%`); - } - - const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""; - - const limit = filter.limit || this.maxSpansPerQuery; - args.push(limit); - - const result = await this.client.execute({ - sql: ` - SELECT * FROM ${this.tablePrefix}_logs - ${whereClause} - ORDER BY timestamp DESC - LIMIT ? - `, - args, - }); - - const logs = result.rows.map((row) => this.rowToLogRecord(row)); - - // Filter by attributes if specified - if (filter.attributeKey) { - const key = filter.attributeKey; - return logs.filter((log) => { - if (!log.attributes) return false; - if (filter.attributeValue !== undefined) { - return log.attributes[key] === filter.attributeValue; - } - return key in log.attributes; - }); - } - - return logs; - } catch (error) { - this.logger.error("Failed to query logs", { error, filter }); - throw error; - } - } - - /** - * Delete old logs - */ - async deleteOldLogs(beforeTimestamp: number): Promise { - await this.ensureInitialized(); - - try { - const beforeDate = new Date(beforeTimestamp).toISOString(); - - const result = await this.client.execute({ - sql: ` - DELETE FROM ${this.tablePrefix}_logs - WHERE timestamp < ? 
- `, - args: [beforeDate], - }); - - const deletedCount = result.rowsAffected || 0; - this.debugLog("Old logs deleted", { deletedCount, beforeDate }); - return deletedCount; - } catch (error) { - this.logger.error("Failed to delete old logs", { error, beforeTimestamp }); - throw error; - } - } - - /** - * Convert a database row to an ObservabilityLogRecord - */ - private rowToLogRecord(row: any): ObservabilityLogRecord { - const log: ObservabilityLogRecord = { - timestamp: row.timestamp as string, - body: (() => { - try { - // Only parse if it looks like JSON and can actually be parsed - const bodyStr = row.body as string; - if (bodyStr.startsWith("{") || bodyStr.startsWith("[")) { - return JSON.parse(bodyStr); - } - } catch { - // If parsing fails, treat as string - } - return row.body as string; - })(), - }; - - // Add optional fields only if they have values (not null) - if (row.trace_id !== null) { - log.traceId = row.trace_id as string; - } - if (row.span_id !== null) { - log.spanId = row.span_id as string; - } - if (row.trace_flags !== null) { - log.traceFlags = row.trace_flags as number; - } - if (row.severity_number !== null) { - log.severityNumber = row.severity_number as number; - } - if (row.severity_text !== null) { - log.severityText = row.severity_text as string; - } - if (row.attributes && row.attributes !== "null") { - try { - const attributes = JSON.parse(row.attributes as string); - if (attributes && Object.keys(attributes).length > 0) { - log.attributes = attributes; - } - } catch { - // Skip if parsing fails - } - } - if (row.resource && row.resource !== "null") { - try { - const resource = JSON.parse(row.resource as string); - if (resource && Object.keys(resource).length > 0) { - log.resource = resource; - } - } catch { - // Skip if parsing fails - } - } - if (row.instrumentation_scope && row.instrumentation_scope !== "null") { - try { - const scope = JSON.parse(row.instrumentation_scope as string); - if (scope) { - log.instrumentationScope = scope; - } - } catch { - // Skip if parsing fails - } - } - - return log; - } - - getInfo() { - return { - adapter: this.constructor.name, - displayName: "LibSQL Observability Storage", - persistent: true, - description: "Persists spans and logs to a LibSQL/Turso database for long-term retention.", - }; - } - - /** - * Close the database connection - */ - async close(): Promise { - // LibSQL client doesn't have an explicit close method - this.debugLog("LibSQL observability adapter closed"); + super(client, options, logger); } } diff --git a/packages/libsql/src/observability-core.ts b/packages/libsql/src/observability-core.ts new file mode 100644 index 000000000..dabe2cd2e --- /dev/null +++ b/packages/libsql/src/observability-core.ts @@ -0,0 +1,880 @@ +/** + * LibSQL Observability Adapter Core + * Contains shared logic for both Node.js and Edge environments + * Environment-specific adapters extend this class + */ + +import type { Client } from "@libsql/client"; +import type { + LogFilter, + ObservabilityLogRecord, + ObservabilitySpan, + ObservabilityStorageAdapter, +} from "@voltagent/core"; +import { safeStringify } from "@voltagent/internal/utils"; +import type { Logger } from "@voltagent/logger"; + +/** + * Core configuration options for LibSQL Observability adapter + */ +export interface LibSQLObservabilityCoreOptions { + /** + * Prefix for table names + * @default "observability" + */ + tablePrefix?: string; + + /** + * Whether to enable debug logging + * @default false + */ + debug?: boolean; + + /** + * Maximum number of 
spans to return in a single query + * @default 1000 + */ + maxSpansPerQuery?: number; +} + +/** + * LibSQL Observability Adapter Core + * Implements all observability storage operations, receives client via dependency injection + */ +export class LibSQLObservabilityCore implements ObservabilityStorageAdapter { + protected client: Client; + protected tablePrefix: string; + protected debug: boolean; + protected logger: Logger; + protected initialized: Promise; + protected maxSpansPerQuery: number; + + constructor(client: Client, options: LibSQLObservabilityCoreOptions, logger: Logger) { + this.client = client; + this.logger = logger; + this.tablePrefix = options.tablePrefix || "observability"; + this.debug = options.debug || false; + this.maxSpansPerQuery = options.maxSpansPerQuery || 1000; + + this.debugLog("LibSQL observability adapter core initialized", { + tablePrefix: this.tablePrefix, + debug: this.debug, + maxSpansPerQuery: this.maxSpansPerQuery, + }); + + this.initialized = this.initializeDatabase(); + } + + protected debugLog(message: string, data?: unknown): void { + if (this.debug) { + this.logger.debug(`${message}`, data || ""); + } + } + + private async initializeDatabase(): Promise { + try { + await this.client.execute(` + CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_spans ( + span_id TEXT PRIMARY KEY, + trace_id TEXT NOT NULL, + parent_span_id TEXT, + entity_id TEXT, + entity_type TEXT, + name TEXT NOT NULL, + kind INTEGER DEFAULT 0, + start_time TEXT NOT NULL, + end_time TEXT, + duration REAL, + status_code INTEGER DEFAULT 0, + status_message TEXT, + attributes TEXT, + events TEXT, + links TEXT, + resource TEXT, + instrumentation_scope TEXT, + created_at TEXT DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT DEFAULT CURRENT_TIMESTAMP + ) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_trace_id + ON ${this.tablePrefix}_spans(trace_id) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_parent_span_id + ON ${this.tablePrefix}_spans(parent_span_id) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_start_time + ON ${this.tablePrefix}_spans(start_time) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_name + ON ${this.tablePrefix}_spans(name) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_entity_id + ON ${this.tablePrefix}_spans(entity_id) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_entity_type + ON ${this.tablePrefix}_spans(entity_type) + `); + + await this.client.execute(` + CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_traces ( + trace_id TEXT PRIMARY KEY, + root_span_id TEXT, + entity_id TEXT, + entity_type TEXT, + start_time TEXT NOT NULL, + end_time TEXT, + span_count INTEGER DEFAULT 1, + created_at TEXT DEFAULT CURRENT_TIMESTAMP, + updated_at TEXT DEFAULT CURRENT_TIMESTAMP + ) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_start_time + ON ${this.tablePrefix}_traces(start_time DESC) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_entity_id + ON ${this.tablePrefix}_traces(entity_id) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_entity_type + ON ${this.tablePrefix}_traces(entity_type) + `); + + await this.client.execute(` + CREATE TABLE IF NOT 
EXISTS ${this.tablePrefix}_logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp TEXT NOT NULL, + trace_id TEXT, + span_id TEXT, + trace_flags INTEGER, + severity_number INTEGER, + severity_text TEXT, + body TEXT NOT NULL, + attributes TEXT, + resource TEXT, + instrumentation_scope TEXT, + created_at TEXT DEFAULT CURRENT_TIMESTAMP + ) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_trace_id + ON ${this.tablePrefix}_logs(trace_id) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_span_id + ON ${this.tablePrefix}_logs(span_id) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_timestamp + ON ${this.tablePrefix}_logs(timestamp DESC) + `); + + await this.client.execute(` + CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_severity + ON ${this.tablePrefix}_logs(severity_number) + `); + + this.debugLog("Database tables initialized successfully"); + } catch (error) { + this.logger.error("Failed to initialize database tables", { error }); + throw error; + } + } + + protected async ensureInitialized(): Promise { + await this.initialized; + } + + async addSpan(span: ObservabilitySpan): Promise { + await this.ensureInitialized(); + + try { + const entityId = (span.attributes?.["entity.id"] as string) || null; + const entityType = (span.attributes?.["entity.type"] as string) || null; + + await this.client.batch([ + { + sql: ` + INSERT INTO ${this.tablePrefix}_spans ( + span_id, trace_id, parent_span_id, entity_id, entity_type, name, kind, + start_time, end_time, duration, + status_code, status_message, + attributes, events, links, + resource, instrumentation_scope + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + args: [ + span.spanId, + span.traceId, + span.parentSpanId || null, + entityId, + entityType, + span.name, + span.kind, + span.startTime, + span.endTime || null, + span.duration || null, + span.status.code, + span.status.message || null, + safeStringify(span.attributes), + safeStringify(span.events), + span.links ? safeStringify(span.links) : null, + span.resource ? safeStringify(span.resource) : null, + span.instrumentationScope ? safeStringify(span.instrumentationScope) : null, + ], + }, + { + sql: ` + INSERT INTO ${this.tablePrefix}_traces ( + trace_id, root_span_id, entity_id, entity_type, start_time, end_time, span_count + ) VALUES (?, ?, ?, ?, ?, ?, 1) + ON CONFLICT(trace_id) DO UPDATE SET + span_count = span_count + 1, + entity_id = COALESCE(excluded.entity_id, entity_id), + entity_type = COALESCE(excluded.entity_type, entity_type), + start_time = MIN(start_time, excluded.start_time), + end_time = MAX(COALESCE(end_time, excluded.end_time), excluded.end_time), + updated_at = CURRENT_TIMESTAMP + `, + args: [ + span.traceId, + span.parentSpanId ? 
null : span.spanId,
+            entityId,
+            entityType,
+            span.startTime,
+            span.endTime || null,
+          ],
+        },
+      ]);
+
+      this.debugLog("Span added successfully", {
+        spanId: span.spanId,
+        traceId: span.traceId,
+      });
+    } catch (error) {
+      this.logger.error("Failed to add span", { error, span });
+      throw error;
+    }
+  }
+
+  async updateSpan(spanId: string, updates: Partial<ObservabilitySpan>): Promise<void> {
+    await this.ensureInitialized();
+
+    try {
+      const setClauses: string[] = [];
+      const args: any[] = [];
+
+      if (updates.endTime !== undefined) {
+        setClauses.push("end_time = ?");
+        args.push(updates.endTime);
+      }
+      if (updates.duration !== undefined) {
+        setClauses.push("duration = ?");
+        args.push(updates.duration);
+      }
+      if (updates.status !== undefined) {
+        setClauses.push("status_code = ?, status_message = ?");
+        args.push(updates.status.code, updates.status.message || null);
+      }
+      if (updates.attributes !== undefined) {
+        setClauses.push("attributes = ?");
+        args.push(safeStringify(updates.attributes));
+      }
+      if (updates.events !== undefined) {
+        setClauses.push("events = ?");
+        args.push(safeStringify(updates.events));
+      }
+      if (updates.links !== undefined) {
+        setClauses.push("links = ?");
+        args.push(safeStringify(updates.links));
+      }
+
+      if (setClauses.length === 0) {
+        return;
+      }
+
+      setClauses.push("updated_at = CURRENT_TIMESTAMP");
+      args.push(spanId);
+
+      await this.client.execute({
+        sql: `
+          UPDATE ${this.tablePrefix}_spans
+          SET ${setClauses.join(", ")}
+          WHERE span_id = ?
+        `,
+        args,
+      });
+
+      if (updates.endTime) {
+        const span = await this.getSpan(spanId);
+        if (span) {
+          await this.client.execute({
+            sql: `
+              UPDATE ${this.tablePrefix}_traces
+              SET end_time = MAX(COALESCE(end_time, ?), ?),
+                  updated_at = CURRENT_TIMESTAMP
+              WHERE trace_id = ?
+            `,
+            args: [updates.endTime, updates.endTime, span.traceId],
+          });
+        }
+      }
+
+      this.debugLog("Span updated successfully", { spanId, updates });
+    } catch (error) {
+      this.logger.error("Failed to update span", { error, spanId, updates });
+      throw error;
+    }
+  }
+
+  async getSpan(spanId: string): Promise<ObservabilitySpan | null> {
+    await this.ensureInitialized();
+
+    try {
+      const result = await this.client.execute({
+        sql: `SELECT * FROM ${this.tablePrefix}_spans WHERE span_id = ?`,
+        args: [spanId],
+      });
+
+      if (result.rows.length === 0) {
+        return null;
+      }
+
+      return this.rowToSpan(result.rows[0]);
+    } catch (error) {
+      this.logger.error("Failed to get span", { error, spanId });
+      throw error;
+    }
+  }
+
+  async getTrace(traceId: string): Promise<ObservabilitySpan[]> {
+    await this.ensureInitialized();
+
+    try {
+      const result = await this.client.execute({
+        sql: `
+          SELECT * FROM ${this.tablePrefix}_spans
+          WHERE trace_id = ?
+          ORDER BY start_time ASC
+          LIMIT ?
+        `,
+        args: [traceId, this.maxSpansPerQuery],
+      });
+
+      return result.rows.map((row) => this.rowToSpan(row));
+    } catch (error) {
+      this.logger.error("Failed to get trace", { error, traceId });
+      throw error;
+    }
+  }
+
+  async listTraces(
+    limit = 100,
+    offset = 0,
+    filter?: {
+      entityId?: string;
+      entityType?: "agent" | "workflow";
+    },
+  ): Promise<string[]> {
+    await this.ensureInitialized();
+
+    try {
+      let sql: string;
+      let args: any[] = [];
+      const conditions: string[] = [];
+
+      if (filter?.entityId) {
+        conditions.push("entity_id = ?");
+        args.push(filter.entityId);
+      }
+
+      if (filter?.entityType) {
+        conditions.push("entity_type = ?");
+        args.push(filter.entityType);
+      }
+
+      if (conditions.length > 0) {
+        sql = `
+          SELECT trace_id FROM ${this.tablePrefix}_traces
+          WHERE ${conditions.join(" AND ")}
+          ORDER BY start_time DESC
+          LIMIT ?
OFFSET ? + `; + args.push(limit, offset); + } else { + sql = ` + SELECT trace_id FROM ${this.tablePrefix}_traces + ORDER BY start_time DESC + LIMIT ? OFFSET ? + `; + args = [limit, offset]; + } + + const result = await this.client.execute({ sql, args }); + return result.rows.map((row) => row.trace_id as string); + } catch (error) { + this.logger.error("Failed to list traces", { error, limit, offset, filter }); + throw error; + } + } + + async deleteOldSpans(beforeTimestamp: number): Promise { + await this.ensureInitialized(); + + try { + const beforeDate = new Date(beforeTimestamp).toISOString(); + + const tracesResult = await this.client.execute({ + sql: `SELECT DISTINCT trace_id FROM ${this.tablePrefix}_spans WHERE start_time < ?`, + args: [beforeDate], + }); + + const affectedTraceIds = tracesResult.rows.map((row) => row.trace_id as string); + + const deleteResult = await this.client.execute({ + sql: `DELETE FROM ${this.tablePrefix}_spans WHERE start_time < ?`, + args: [beforeDate], + }); + + if (affectedTraceIds.length > 0) { + for (const traceId of affectedTraceIds) { + const countResult = await this.client.execute({ + sql: `SELECT COUNT(*) as count FROM ${this.tablePrefix}_spans WHERE trace_id = ?`, + args: [traceId], + }); + + const count = countResult.rows[0].count as number; + if (count === 0) { + await this.client.execute({ + sql: `DELETE FROM ${this.tablePrefix}_traces WHERE trace_id = ?`, + args: [traceId], + }); + } else { + await this.client.execute({ + sql: ` + UPDATE ${this.tablePrefix}_traces + SET span_count = ?, updated_at = CURRENT_TIMESTAMP + WHERE trace_id = ? + `, + args: [count, traceId], + }); + } + } + } + + const deletedCount = deleteResult.rowsAffected || 0; + this.debugLog("Old spans deleted", { deletedCount, beforeDate }); + return deletedCount; + } catch (error) { + this.logger.error("Failed to delete old spans", { error, beforeTimestamp }); + throw error; + } + } + + async clear(): Promise { + await this.ensureInitialized(); + + try { + await this.client.batch([ + { sql: `DELETE FROM ${this.tablePrefix}_spans`, args: [] }, + { sql: `DELETE FROM ${this.tablePrefix}_traces`, args: [] }, + { sql: `DELETE FROM ${this.tablePrefix}_logs`, args: [] }, + ]); + + this.debugLog("All spans, traces, and logs cleared"); + } catch (error) { + this.logger.error("Failed to clear data", { error }); + throw error; + } + } + + private rowToSpan(row: any): ObservabilitySpan { + const span: ObservabilitySpan = { + traceId: row.trace_id as string, + spanId: row.span_id as string, + name: row.name as string, + kind: row.kind as number, + startTime: row.start_time as string, + status: { + code: row.status_code as number, + }, + attributes: row.attributes ? JSON.parse(row.attributes as string) : {}, + events: row.events ? 
JSON.parse(row.events as string) : [], + }; + + if (row.parent_span_id !== null) { + span.parentSpanId = row.parent_span_id as string; + } + if (row.end_time !== null) { + span.endTime = row.end_time as string; + } + if (row.duration !== null) { + span.duration = row.duration as number; + } + if (row.status_message !== null) { + span.status.message = row.status_message as string; + } + if (row.links && row.links !== "null") { + const links = JSON.parse(row.links as string); + if (links && links.length > 0) { + span.links = links; + } + } + if (row.resource && row.resource !== "null") { + const resource = JSON.parse(row.resource as string); + if (resource && Object.keys(resource).length > 0) { + span.resource = resource; + } + } + if (row.instrumentation_scope && row.instrumentation_scope !== "null") { + const scope = JSON.parse(row.instrumentation_scope as string); + if (scope) { + span.instrumentationScope = scope; + } + } + + return span; + } + + async getStats(): Promise<{ + spanCount: number; + traceCount: number; + oldestSpan?: Date; + newestSpan?: Date; + }> { + await this.ensureInitialized(); + + try { + const [spanCountResult, traceCountResult, timeRangeResult] = await Promise.all([ + this.client.execute(`SELECT COUNT(*) as count FROM ${this.tablePrefix}_spans`), + this.client.execute(`SELECT COUNT(*) as count FROM ${this.tablePrefix}_traces`), + this.client.execute(` + SELECT MIN(start_time) as oldest, MAX(start_time) as newest + FROM ${this.tablePrefix}_spans + `), + ]); + + const stats: any = { + spanCount: spanCountResult.rows[0].count as number, + traceCount: traceCountResult.rows[0].count as number, + }; + + if (timeRangeResult.rows[0].oldest) { + stats.oldestSpan = new Date(timeRangeResult.rows[0].oldest as string); + } + if (timeRangeResult.rows[0].newest) { + stats.newestSpan = new Date(timeRangeResult.rows[0].newest as string); + } + + return stats; + } catch (error) { + this.logger.error("Failed to get stats", { error }); + throw error; + } + } + + async saveLogRecord(logRecord: any): Promise { + await this.ensureInitialized(); + + try { + let timestamp: string; + if (Array.isArray(logRecord.hrTime)) { + const timeMs = logRecord.hrTime[0] * 1000 + logRecord.hrTime[1] / 1000000; + timestamp = new Date(timeMs).toISOString(); + } else if (logRecord.timestamp) { + timestamp = + typeof logRecord.timestamp === "string" + ? logRecord.timestamp + : new Date(logRecord.timestamp).toISOString(); + } else { + timestamp = new Date().toISOString(); + } + + const spanContext = logRecord.spanContext || {}; + const traceId = spanContext.traceId || null; + const spanId = spanContext.spanId || null; + const traceFlags = spanContext.traceFlags ?? null; + + const severityNumber = logRecord.severityNumber ?? null; + const severityText = logRecord.severityText || null; + const body = + typeof logRecord.body === "string" ? logRecord.body : safeStringify(logRecord.body); + const attributes = logRecord.attributes ? safeStringify(logRecord.attributes) : null; + const resource = logRecord.resource?.attributes + ? safeStringify(logRecord.resource.attributes) + : null; + const instrumentationScope = + logRecord.instrumentationLibrary || logRecord.instrumentationScope + ? 
safeStringify(logRecord.instrumentationLibrary || logRecord.instrumentationScope) + : null; + + await this.client.execute({ + sql: ` + INSERT INTO ${this.tablePrefix}_logs ( + timestamp, trace_id, span_id, trace_flags, + severity_number, severity_text, body, + attributes, resource, instrumentation_scope + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + `, + args: [ + timestamp, + traceId, + spanId, + traceFlags, + severityNumber, + severityText, + body, + attributes, + resource, + instrumentationScope, + ], + }); + + this.debugLog("Log record saved successfully", { + timestamp, + traceId, + spanId, + severityNumber, + }); + } catch (error) { + this.logger.error("Failed to save log record", { error, logRecord }); + throw error; + } + } + + async getLogsByTraceId(traceId: string): Promise { + await this.ensureInitialized(); + + try { + const result = await this.client.execute({ + sql: ` + SELECT * FROM ${this.tablePrefix}_logs + WHERE trace_id = ? + ORDER BY timestamp DESC + LIMIT ? + `, + args: [traceId, this.maxSpansPerQuery], + }); + + return result.rows.map((row) => this.rowToLogRecord(row)); + } catch (error) { + this.logger.error("Failed to get logs by trace ID", { error, traceId }); + throw error; + } + } + + async getLogsBySpanId(spanId: string): Promise { + await this.ensureInitialized(); + + try { + const result = await this.client.execute({ + sql: ` + SELECT * FROM ${this.tablePrefix}_logs + WHERE span_id = ? + ORDER BY timestamp DESC + LIMIT ? + `, + args: [spanId, this.maxSpansPerQuery], + }); + + return result.rows.map((row) => this.rowToLogRecord(row)); + } catch (error) { + this.logger.error("Failed to get logs by span ID", { error, spanId }); + throw error; + } + } + + async queryLogs(filter: LogFilter): Promise { + await this.ensureInitialized(); + + try { + const whereClauses: string[] = []; + const args: any[] = []; + + if (filter.traceId) { + whereClauses.push("trace_id = ?"); + args.push(filter.traceId); + } + if (filter.spanId) { + whereClauses.push("span_id = ?"); + args.push(filter.spanId); + } + if (filter.severityNumber !== undefined) { + whereClauses.push("severity_number >= ?"); + args.push(filter.severityNumber); + } + if (filter.severityText) { + whereClauses.push("severity_text = ?"); + args.push(filter.severityText); + } + if (filter.instrumentationScope) { + whereClauses.push("instrumentation_scope LIKE ?"); + args.push(`%${filter.instrumentationScope}%`); + } + if (filter.startTimeMin !== undefined) { + const minTime = new Date(filter.startTimeMin).toISOString(); + whereClauses.push("timestamp >= ?"); + args.push(minTime); + } + if (filter.startTimeMax !== undefined) { + const maxTime = new Date(filter.startTimeMax).toISOString(); + whereClauses.push("timestamp <= ?"); + args.push(maxTime); + } + if (filter.bodyContains) { + whereClauses.push("body LIKE ?"); + args.push(`%${filter.bodyContains}%`); + } + + const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""; + + const limit = filter.limit || this.maxSpansPerQuery; + args.push(limit); + + const result = await this.client.execute({ + sql: ` + SELECT * FROM ${this.tablePrefix}_logs + ${whereClause} + ORDER BY timestamp DESC + LIMIT ? 
+ `, + args, + }); + + const logs = result.rows.map((row) => this.rowToLogRecord(row)); + + if (filter.attributeKey) { + const key = filter.attributeKey; + return logs.filter((log) => { + if (!log.attributes) return false; + if (filter.attributeValue !== undefined) { + return log.attributes[key] === filter.attributeValue; + } + return key in log.attributes; + }); + } + + return logs; + } catch (error) { + this.logger.error("Failed to query logs", { error, filter }); + throw error; + } + } + + async deleteOldLogs(beforeTimestamp: number): Promise { + await this.ensureInitialized(); + + try { + const beforeDate = new Date(beforeTimestamp).toISOString(); + + const result = await this.client.execute({ + sql: `DELETE FROM ${this.tablePrefix}_logs WHERE timestamp < ?`, + args: [beforeDate], + }); + + const deletedCount = result.rowsAffected || 0; + this.debugLog("Old logs deleted", { deletedCount, beforeDate }); + return deletedCount; + } catch (error) { + this.logger.error("Failed to delete old logs", { error, beforeTimestamp }); + throw error; + } + } + + private rowToLogRecord(row: any): ObservabilityLogRecord { + const log: ObservabilityLogRecord = { + timestamp: row.timestamp as string, + body: (() => { + try { + const bodyStr = row.body as string; + if (bodyStr.startsWith("{") || bodyStr.startsWith("[")) { + return JSON.parse(bodyStr); + } + } catch { + // If parsing fails, treat as string + } + return row.body as string; + })(), + }; + + if (row.trace_id !== null) { + log.traceId = row.trace_id as string; + } + if (row.span_id !== null) { + log.spanId = row.span_id as string; + } + if (row.trace_flags !== null) { + log.traceFlags = row.trace_flags as number; + } + if (row.severity_number !== null) { + log.severityNumber = row.severity_number as number; + } + if (row.severity_text !== null) { + log.severityText = row.severity_text as string; + } + if (row.attributes && row.attributes !== "null") { + try { + const attributes = JSON.parse(row.attributes as string); + if (attributes && Object.keys(attributes).length > 0) { + log.attributes = attributes; + } + } catch { + // Skip if parsing fails + } + } + if (row.resource && row.resource !== "null") { + try { + const resource = JSON.parse(row.resource as string); + if (resource && Object.keys(resource).length > 0) { + log.resource = resource; + } + } catch { + // Skip if parsing fails + } + } + if (row.instrumentation_scope && row.instrumentation_scope !== "null") { + try { + const scope = JSON.parse(row.instrumentation_scope as string); + if (scope) { + log.instrumentationScope = scope; + } + } catch { + // Skip if parsing fails + } + } + + return log; + } + + getInfo() { + return { + adapter: this.constructor.name, + displayName: "LibSQL Observability Storage", + persistent: true, + description: "Persists spans and logs to a LibSQL/Turso database for long-term retention.", + }; + } + + async close(): Promise { + this.debugLog("LibSQL observability adapter closed"); + } +} diff --git a/packages/libsql/src/vector-adapter-edge.ts b/packages/libsql/src/vector-adapter-edge.ts new file mode 100644 index 000000000..74bed0864 --- /dev/null +++ b/packages/libsql/src/vector-adapter-edge.ts @@ -0,0 +1,77 @@ +/** + * LibSQL Vector Adapter - Edge/Cloudflare Workers + * Provides vector storage and similarity search using remote Turso database + * Uses @libsql/client/web for edge runtime compatibility + */ + +import { createClient } from "@libsql/client/web"; +import { createPinoLogger } from "@voltagent/logger"; +import type { Logger } from 
"@voltagent/logger"; +import { LibSQLVectorCore, type LibSQLVectorCoreOptions } from "./vector-core"; + +/** + * LibSQL Vector Adapter configuration options (Edge) + */ +export interface LibSQLVectorEdgeOptions extends LibSQLVectorCoreOptions { + /** + * Database URL - must be a remote Turso URL (libsql://) + * File-based URLs are not supported in edge environments + */ + url: string; + + /** + * Auth token for remote connections (required for Turso) + */ + authToken: string; + + /** + * Logger instance + */ + logger?: Logger; +} + +/** + * LibSQL Vector Adapter - Edge Compatible + * Production-ready vector storage with similarity search + * Only supports remote Turso databases (libsql://) + */ +export class LibSQLVectorAdapterEdge extends LibSQLVectorCore { + constructor(options: LibSQLVectorEdgeOptions) { + // Validate URL - edge only supports remote URLs + if (!options.url) { + throw new Error("LibSQLVectorAdapterEdge requires a url option"); + } + + if ( + options.url.startsWith("file:") || + options.url === ":memory:" || + !options.url.startsWith("libsql://") + ) { + throw new Error( + "LibSQLVectorAdapterEdge only supports remote Turso URLs (libsql://). " + + "File-based databases are not supported in edge environments. " + + "Use LibSQLVectorAdapter from '@voltagent/libsql' for Node.js environments.", + ); + } + + if (!options.authToken) { + throw new Error("LibSQLVectorAdapterEdge requires an authToken for remote connections"); + } + + // Initialize logger + const logger = + options.logger ?? + createPinoLogger({ + name: "libsql-vector-adapter-edge", + level: options.debug ? "debug" : "info", + }); + + // Initialize LibSQL client using web-compatible import + const client = createClient({ + url: options.url, + authToken: options.authToken, + }); + + super(client, options, logger); + } +} diff --git a/packages/libsql/src/vector-adapter.ts b/packages/libsql/src/vector-adapter.ts index a38baec8c..2e177974c 100644 --- a/packages/libsql/src/vector-adapter.ts +++ b/packages/libsql/src/vector-adapter.ts @@ -1,26 +1,19 @@ /** - * LibSQL Vector Adapter + * LibSQL Vector Adapter - Node.js * Provides vector storage and similarity search using LibSQL/Turso database - * Stores vectors as binary BLOBs for efficiency */ import fs from "node:fs"; import path from "node:path"; -import { type Client, createClient } from "@libsql/client"; -import { - type SearchResult, - type VectorAdapter, - type VectorItem, - type VectorSearchOptions, - cosineSimilarity, -} from "@voltagent/core"; -import { safeStringify } from "@voltagent/internal"; -import { type Logger, createPinoLogger } from "@voltagent/logger"; +import { createClient } from "@libsql/client"; +import { createPinoLogger } from "@voltagent/logger"; +import type { Logger } from "@voltagent/logger"; +import { LibSQLVectorCore, type LibSQLVectorCoreOptions } from "./vector-core"; /** * LibSQL Vector Adapter configuration options */ -export interface LibSQLVectorOptions { +export interface LibSQLVectorOptions extends LibSQLVectorCoreOptions { /** * Database URL (e.g., 'file:./memory.db' or 'libsql://...') * @default "file:./.voltagent/memory.db" @@ -32,109 +25,44 @@ export interface LibSQLVectorOptions { */ authToken?: string; - /** - * Prefix for table names - * @default "voltagent" - */ - tablePrefix?: string; - - /** - * Maximum vector dimensions allowed - * @default 1536 - */ - maxVectorDimensions?: number; - - /** - * Size of the LRU cache for frequently accessed vectors - * @default 100 - */ - cacheSize?: number; - - /** - * Batch size for 
bulk operations - * @default 100 - */ - batchSize?: number; - - /** - * Enable debug logging - * @default false - */ - debug?: boolean; - /** * Logger instance */ logger?: Logger; - - /** - * Maximum number of retries for database operations - * @default 3 - */ - maxRetries?: number; - - /** - * Initial retry delay in milliseconds - * @default 100 - */ - retryDelayMs?: number; } /** - * LibSQL Vector Adapter + * LibSQL Vector Adapter - Node.js * Production-ready vector storage with similarity search + * Supports both local SQLite files and remote Turso databases */ -export class LibSQLVectorAdapter implements VectorAdapter { - private client: Client; - private tablePrefix: string; - private maxVectorDimensions: number; - private cacheSize: number; - private batchSize: number; - private debug: boolean; - private logger: Logger; - private maxRetries: number; - private retryDelayMs: number; - private url: string; - private initialized = false; - private vectorCache: Map; - private dimensions: number | null = null; - +export class LibSQLVectorAdapter extends LibSQLVectorCore { constructor(options: LibSQLVectorOptions = {}) { - this.tablePrefix = options.tablePrefix ?? "voltagent"; - this.maxVectorDimensions = options.maxVectorDimensions ?? 1536; - this.cacheSize = options.cacheSize ?? 100; - this.batchSize = options.batchSize ?? 100; - this.maxRetries = options.maxRetries ?? 3; - this.retryDelayMs = options.retryDelayMs ?? 100; - this.debug = options.debug ?? false; - // Initialize logger - this.logger = + const logger = options.logger ?? createPinoLogger({ name: "libsql-vector-adapter", - level: this.debug ? "debug" : "info", + level: options.debug ? "debug" : "info", }); // Normalize database URL const requestedUrl = options.url ?? "file:./.voltagent/memory.db"; - // In-memory: use cache=shared which is supported by @libsql/core for :memory: - // Accept both ":memory:" and "file::memory:" inputs and normalize to URI form. 
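The in-memory handling here collapses every accepted spelling to ":memory:". As a standalone sketch of just that rule (the helper name is illustrative, not part of the package API):

```ts
// Sketch of the in-memory URL normalization the Node constructor applies.
// normalizeMemoryUrl is an illustrative name, not an exported helper.
function normalizeMemoryUrl(requestedUrl: string): string {
  // ":memory:" and all "file::memory:" spellings normalize to ":memory:",
  // which @libsql/core expands to a private, per-connection in-memory database.
  if (
    requestedUrl === ":memory:" ||
    requestedUrl === "file::memory:" ||
    requestedUrl.startsWith("file::memory:")
  ) {
    return ":memory:";
  }
  return requestedUrl;
}

console.log(normalizeMemoryUrl("file::memory:?cache=shared")); // ":memory:"
console.log(normalizeMemoryUrl("file:./.voltagent/memory.db")); // unchanged
```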
+ let url: string; + if ( requestedUrl === ":memory:" || requestedUrl === "file::memory:" || requestedUrl.startsWith("file::memory:") ) { - // Use private, per-connection in-memory database (no shared cache) - // Accept either form and normalize to ":memory:" which @libsql/core expands to file::memory: - this.url = ":memory:"; + url = ":memory:"; } else { - this.url = requestedUrl; + url = requestedUrl; } // Ensure directory exists for file-based databases (skip pure in-memory) - if (this.url.startsWith("file:") && !this.url.startsWith("file::memory:")) { - const dbPath = this.url.replace("file:", ""); + if (url.startsWith("file:") && !url.startsWith("file::memory:")) { + const dbPath = url.replace("file:", ""); const dbDir = path.dirname(dbPath); if (!fs.existsSync(dbDir)) { fs.mkdirSync(dbDir, { recursive: true }); @@ -142,53 +70,18 @@ export class LibSQLVectorAdapter implements VectorAdapter { } // Initialize LibSQL client - this.client = createClient({ - url: this.url, + const client = createClient({ + url: url, authToken: options.authToken, }); - // Initialize cache - this.vectorCache = new Map(); + super(client, options, logger); } /** - * Initialize the database schema + * Override to use Buffer for more efficient serialization in Node.js */ - private async initialize(): Promise { - if (this.initialized) return; - - const tableName = `${this.tablePrefix}_vectors`; - - try { - // Create vectors table and indexes atomically - await this.client.executeMultiple(` - BEGIN; - CREATE TABLE IF NOT EXISTS ${tableName} ( - id TEXT PRIMARY KEY, - vector BLOB NOT NULL, - dimensions INTEGER NOT NULL, - metadata TEXT, - content TEXT, - created_at DATETIME DEFAULT CURRENT_TIMESTAMP, - updated_at DATETIME DEFAULT CURRENT_TIMESTAMP - ); - CREATE INDEX IF NOT EXISTS idx_${tableName}_created ON ${tableName}(created_at); - CREATE INDEX IF NOT EXISTS idx_${tableName}_dimensions ON ${tableName}(dimensions); - COMMIT; - `); - - this.initialized = true; - this.logger.debug("Vector adapter initialized"); - } catch (error) { - this.logger.error("Failed to initialize vector adapter", error as Error); - throw error; - } - } - - /** - * Serialize a vector to binary format - */ - private serializeVector(vector: number[]): Buffer { + protected serializeVector(vector: number[]): Uint8Array { const buffer = Buffer.allocUnsafe(vector.length * 4); for (let i = 0; i < vector.length; i++) { buffer.writeFloatLE(vector[i], i * 4); @@ -197,456 +90,21 @@ export class LibSQLVectorAdapter implements VectorAdapter { } /** - * Deserialize a vector from binary format + * Override to use Buffer for more efficient deserialization in Node.js */ - private deserializeVector(buffer: Buffer | Uint8Array | ArrayBuffer): number[] { - let bytes: Buffer; - if (buffer instanceof Buffer) { - bytes = buffer; - } else if (buffer instanceof ArrayBuffer) { - bytes = Buffer.from(buffer); + protected deserializeVector(data: Uint8Array | ArrayBuffer): number[] { + let buffer: Buffer; + if (data instanceof Buffer) { + buffer = data; + } else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); } else { - bytes = Buffer.from(buffer); + buffer = Buffer.from(data); } const vector: number[] = []; - for (let i = 0; i < bytes.length; i += 4) { - vector.push(bytes.readFloatLE(i)); + for (let i = 0; i < buffer.length; i += 4) { + vector.push(buffer.readFloatLE(i)); } return vector; } - - /** - * Execute a database operation with retries - */ - private async executeWithRetry(operation: () => Promise, context: string): Promise { - let lastError: 
Error | undefined; - let delay = this.retryDelayMs; - - for (let attempt = 1; attempt <= this.maxRetries; attempt++) { - try { - return await operation(); - } catch (error) { - lastError = error as Error; - this.logger.warn(`Operation failed (attempt ${attempt}): ${context}`, error as Error); - - if (attempt < this.maxRetries) { - await new Promise((resolve) => setTimeout(resolve, delay)); - delay *= 2; // Exponential backoff - } - } - } - - this.logger.error(`Operation failed after ${this.maxRetries} attempts: ${context}`, lastError); - throw lastError; - } - - /** - * Store a vector with associated metadata - */ - async store(id: string, vector: number[], metadata?: Record): Promise { - await this.initialize(); - - // Validate vector contents - if (!Array.isArray(vector) || vector.length === 0) { - throw new Error("Vector must be a non-empty array"); - } - - // Validate dimensions - if (vector.length > this.maxVectorDimensions) { - throw new Error( - `Vector dimensions (${vector.length}) exceed maximum (${this.maxVectorDimensions})`, - ); - } - - if (this.dimensions === null) { - this.dimensions = vector.length; - } else if (vector.length !== this.dimensions) { - throw new Error( - `Vector dimension mismatch. Expected ${this.dimensions}, got ${vector.length}`, - ); - } - - const tableName = `${this.tablePrefix}_vectors`; - const serializedVector = this.serializeVector(vector); - const metadataJson = metadata ? safeStringify(metadata) : null; - - await this.executeWithRetry(async () => { - await this.client.execute({ - sql: ` - INSERT OR REPLACE INTO ${tableName} - (id, vector, dimensions, metadata, updated_at) - VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP) - `, - args: [id, serializedVector, vector.length, metadataJson], - }); - }, `store vector ${id}`); - - // Update cache - if (this.vectorCache.size >= this.cacheSize) { - const firstKey = this.vectorCache.keys().next().value; - if (firstKey) this.vectorCache.delete(firstKey); - } - this.vectorCache.set(id, { id, vector, metadata }); - - this.logger.debug(`Vector stored: ${id} (${vector.length} dimensions)`); - } - - /** - * Store multiple vectors in batch - */ - async storeBatch(items: VectorItem[]): Promise { - await this.initialize(); - - if (items.length === 0) return; - - const tableName = `${this.tablePrefix}_vectors`; - - // Process in batches to avoid memory issues - for (let i = 0; i < items.length; i += this.batchSize) { - const batch = items.slice(i, i + this.batchSize); - - await this.executeWithRetry(async () => { - const stmts: { sql: string; args: any[] }[] = []; - for (const item of batch) { - if (!Array.isArray(item.vector) || item.vector.length === 0) { - throw new Error("Vector must be a non-empty array"); - } - // Validate dimensions - if (this.dimensions === null) { - this.dimensions = item.vector.length; - } else if (item.vector.length !== this.dimensions) { - throw new Error( - `Vector dimension mismatch. Expected ${this.dimensions}, got ${item.vector.length}`, - ); - } - - const serializedVector = this.serializeVector(item.vector); - const metadataJson = item.metadata ? safeStringify(item.metadata) : null; - const content = item.content ?? 
null; - stmts.push({ - sql: `INSERT OR REPLACE INTO ${tableName} (id, vector, dimensions, metadata, content, updated_at) VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)`, - args: [item.id, serializedVector, item.vector.length, metadataJson, content], - }); - } - await this.client.batch(stmts, "write"); - }, `storeBatch ${batch.length} vectors`); - - this.logger.debug(`Batch of ${batch.length} vectors stored`); - } - } - - /** - * Search for similar vectors using cosine similarity - */ - async search(queryVector: number[], options?: VectorSearchOptions): Promise { - await this.initialize(); - - const { limit = 10, threshold = 0, filter } = options || {}; - - // Validate query vector dimensions - if (this.dimensions !== null && queryVector.length !== this.dimensions) { - throw new Error( - `Query vector dimension mismatch. Expected ${this.dimensions}, got ${queryVector.length}`, - ); - } - - const tableName = `${this.tablePrefix}_vectors`; - - // Build query with optional dimension filter - let query = `SELECT id, vector, dimensions, metadata, content FROM ${tableName}`; - const args: any[] = []; - - if (this.dimensions !== null) { - query += " WHERE dimensions = ?"; - args.push(this.dimensions); - } - - const result = await this.executeWithRetry( - async () => await this.client.execute({ sql: query, args }), - "search vectors", - ); - - const searchResults: SearchResult[] = []; - - // Calculate similarities for all vectors - for (const row of result.rows) { - const id = row.id as string; - const vectorBlob = row.vector as Uint8Array | ArrayBuffer; - const metadataJson = row.metadata as string | null; - const content = (row.content as string | null) ?? undefined; - - // Parse metadata - const metadata = metadataJson ? JSON.parse(metadataJson) : undefined; - - // Apply metadata filter if provided - if (filter && !this.matchesFilter(metadata, filter)) { - continue; - } - - // Deserialize vector - const vector = this.deserializeVector(vectorBlob); - - // Calculate cosine similarity - const similarity = cosineSimilarity(queryVector, vector); - - // Convert similarity to score (0-1 range where 1 is most similar) - const score = (similarity + 1) / 2; - - if (score >= threshold) { - searchResults.push({ - id, - vector, - metadata, - content, - score, - distance: 1 - similarity, // Convert to distance metric - }); - } - } - - // Sort by score (descending) and limit results - searchResults.sort((a, b) => b.score - a.score); - - return searchResults.slice(0, limit); - } - - /** - * Check if metadata matches the filter criteria - */ - private matchesFilter( - metadata: Record | undefined, - filter: Record, - ): boolean { - if (!metadata) { - return false; - } - - for (const [key, value] of Object.entries(filter)) { - if (metadata[key] !== value) { - return false; - } - } - - return true; - } - - /** - * Delete a vector by ID - */ - async delete(id: string): Promise { - await this.initialize(); - - const tableName = `${this.tablePrefix}_vectors`; - - await this.executeWithRetry(async () => { - await this.client.execute({ - sql: `DELETE FROM ${tableName} WHERE id = ?`, - args: [id], - }); - }, `delete vector ${id}`); - - // Remove from cache - this.vectorCache.delete(id); - - this.logger.debug(`Vector deleted: ${id}`); - } - - /** - * Delete multiple vectors by IDs - */ - async deleteBatch(ids: string[]): Promise { - await this.initialize(); - - if (ids.length === 0) return; - - const tableName = `${this.tablePrefix}_vectors`; - - // Process in batches - for (let i = 0; i < ids.length; i += this.batchSize) { - 
const batch = ids.slice(i, i + this.batchSize); - const placeholders = batch.map(() => "?").join(","); - - await this.executeWithRetry(async () => { - await this.client.execute({ - sql: `DELETE FROM ${tableName} WHERE id IN (${placeholders})`, - args: batch, - }); - }, `deleteBatch ${batch.length} vectors`); - - // Remove from cache - for (const id of batch) { - this.vectorCache.delete(id); - } - - this.logger.debug(`Batch of ${batch.length} vectors deleted`); - } - } - - /** - * Clear all vectors - */ - async clear(): Promise { - await this.initialize(); - - const tableName = `${this.tablePrefix}_vectors`; - - await this.executeWithRetry(async () => { - await this.client.execute(`DELETE FROM ${tableName}`); - }, "clear all vectors"); - - // Clear cache and reset dimensions - this.vectorCache.clear(); - this.dimensions = null; - - this.logger.debug("All vectors cleared"); - } - - /** - * Get total count of stored vectors - */ - async count(): Promise { - await this.initialize(); - - const tableName = `${this.tablePrefix}_vectors`; - - const result = await this.executeWithRetry( - async () => await this.client.execute(`SELECT COUNT(*) as count FROM ${tableName}`), - "count vectors", - ); - - const raw = result.rows[0]?.count as any; - // libsql/sqlite may return number, string, or bigint depending on driver - if (typeof raw === "bigint") return Number(raw); - if (typeof raw === "string") return Number.parseInt(raw, 10) || 0; - return (raw as number) ?? 0; - } - - /** - * Get a specific vector by ID - */ - async get(id: string): Promise { - await this.initialize(); - - // Check cache first - if (this.vectorCache.has(id)) { - const cached = this.vectorCache.get(id); - if (cached) { - return { - ...cached, - vector: [...cached.vector], // Return a copy - metadata: cached.metadata ? { ...cached.metadata } : undefined, - }; - } - } - - const tableName = `${this.tablePrefix}_vectors`; - - const result = await this.executeWithRetry( - async () => - await this.client.execute({ - sql: `SELECT id, vector, metadata, content FROM ${tableName} WHERE id = ?`, - args: [id], - }), - `get vector ${id}`, - ); - - if (result.rows.length === 0) { - return null; - } - - const row = result.rows[0]; - const vectorBlob = row.vector as unknown as Uint8Array | ArrayBuffer; - const metadataJson = row.metadata as string | null; - const content = row.content as string | null; - - const vector = this.deserializeVector(vectorBlob); - const metadata = metadataJson ? JSON.parse(metadataJson) : undefined; - - const item: VectorItem = { - id, - vector, - metadata, - content: content ?? 
undefined, - }; - - // Update cache - if (this.vectorCache.size >= this.cacheSize) { - const firstKey = this.vectorCache.keys().next().value; - if (firstKey) this.vectorCache.delete(firstKey); - } - this.vectorCache.set(id, item); - - return item; - } - - /** - * Close the database connection - */ - async close(): Promise { - this.vectorCache.clear(); - this.logger.debug("Vector adapter closed"); - try { - (this.client as any)?.close?.(); - } catch { - // ignore - } - } - - /** - * Get statistics about the vector table and cache - */ - async getStats(): Promise<{ - count: number; - dimensions: number | null; - cacheSize: number; - tableSizeBytes: number; - }> { - await this.initialize(); - - const tableName = `${this.tablePrefix}_vectors`; - - const [countResult, sizeResult] = await Promise.all([ - this.executeWithRetry( - async () => - await this.client.execute( - `SELECT COUNT(*) as count, MAX(dimensions) as dims FROM ${tableName}`, - ), - "getStats count", - ), - // Approximate table size by summing blob/text lengths - this.executeWithRetry( - async () => - await this.client.execute({ - sql: `SELECT - COALESCE(SUM(LENGTH(id)),0) + - COALESCE(SUM(LENGTH(vector)),0) + - COALESCE(SUM(LENGTH(metadata)),0) + - COALESCE(SUM(LENGTH(content)),0) AS size - FROM ${tableName}`, - }), - "getStats size", - ), - ]); - - const row1 = countResult.rows[0] as any; - const row2 = sizeResult.rows[0] as any; - - const countRaw = row1?.count as any; - const dimsRaw = row1?.dims as any; - const sizeRaw = row2?.size as any; - - const normalize = (v: any): number => - typeof v === "bigint" - ? Number(v) - : typeof v === "string" - ? Number.parseInt(v, 10) || 0 - : (v ?? 0); - - return { - count: normalize(countRaw), - dimensions: dimsRaw != null ? normalize(dimsRaw) : this.dimensions, - cacheSize: this.vectorCache.size, - tableSizeBytes: normalize(sizeRaw), - }; - } } diff --git a/packages/libsql/src/vector-core.ts b/packages/libsql/src/vector-core.ts new file mode 100644 index 000000000..023cd73bd --- /dev/null +++ b/packages/libsql/src/vector-core.ts @@ -0,0 +1,532 @@ +/** + * LibSQL Vector Adapter Core + * Contains shared logic for both Node.js and Edge environments + * Environment-specific adapters extend this class + */ + +import type { Client } from "@libsql/client"; +import { + type SearchResult, + type VectorAdapter, + type VectorItem, + type VectorSearchOptions, + cosineSimilarity, +} from "@voltagent/core"; +import { safeStringify } from "@voltagent/internal"; +import type { Logger } from "@voltagent/logger"; + +/** + * Core configuration options for LibSQL Vector adapter + */ +export interface LibSQLVectorCoreOptions { + /** + * Prefix for table names + * @default "voltagent" + */ + tablePrefix?: string; + + /** + * Maximum vector dimensions allowed + * @default 1536 + */ + maxVectorDimensions?: number; + + /** + * Size of the LRU cache for frequently accessed vectors + * @default 100 + */ + cacheSize?: number; + + /** + * Batch size for bulk operations + * @default 100 + */ + batchSize?: number; + + /** + * Enable debug logging + * @default false + */ + debug?: boolean; + + /** + * Maximum number of retries for database operations + * @default 3 + */ + maxRetries?: number; + + /** + * Initial retry delay in milliseconds + * @default 100 + */ + retryDelayMs?: number; +} + +/** + * LibSQL Vector Adapter Core + * Implements all vector storage operations, receives client via dependency injection + */ +export class LibSQLVectorCore implements VectorAdapter { + protected client: Client; + protected 
tablePrefix: string;
+  protected maxVectorDimensions: number;
+  protected cacheSize: number;
+  protected batchSize: number;
+  protected debug: boolean;
+  protected logger: Logger;
+  protected maxRetries: number;
+  protected retryDelayMs: number;
+  protected initialized = false;
+  protected vectorCache: Map<string, VectorItem>;
+  protected dimensions: number | null = null;
+
+  constructor(client: Client, options: LibSQLVectorCoreOptions, logger: Logger) {
+    this.client = client;
+    this.tablePrefix = options.tablePrefix ?? "voltagent";
+    this.maxVectorDimensions = options.maxVectorDimensions ?? 1536;
+    this.cacheSize = options.cacheSize ?? 100;
+    this.batchSize = options.batchSize ?? 100;
+    this.maxRetries = options.maxRetries ?? 3;
+    this.retryDelayMs = options.retryDelayMs ?? 100;
+    this.debug = options.debug ?? false;
+    this.logger = logger;
+
+    this.vectorCache = new Map();
+  }
+
+  /**
+   * Serialize a vector to binary format
+   * Uses ArrayBuffer/DataView for cross-platform compatibility
+   */
+  protected serializeVector(vector: number[]): Uint8Array {
+    const buffer = new ArrayBuffer(vector.length * 4);
+    const view = new DataView(buffer);
+    for (let i = 0; i < vector.length; i++) {
+      view.setFloat32(i * 4, vector[i], true); // little-endian
+    }
+    return new Uint8Array(buffer);
+  }
+
+  /**
+   * Deserialize a vector from binary format
+   */
+  protected deserializeVector(data: Uint8Array | ArrayBuffer): number[] {
+    const bytes = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
+    const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
+    const vector: number[] = [];
+    for (let i = 0; i < bytes.length; i += 4) {
+      vector.push(view.getFloat32(i, true)); // little-endian
+    }
+    return vector;
+  }
+
+  /**
+   * Initialize the database schema
+   */
+  protected async initialize(): Promise<void> {
+    if (this.initialized) return;
+
+    const tableName = `${this.tablePrefix}_vectors`;
+
+    try {
+      await this.client.execute(`
+        CREATE TABLE IF NOT EXISTS ${tableName} (
+          id TEXT PRIMARY KEY,
+          vector BLOB NOT NULL,
+          dimensions INTEGER NOT NULL,
+          metadata TEXT,
+          content TEXT,
+          created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+          updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
+        )
+      `);
+
+      await this.client.execute(
+        `CREATE INDEX IF NOT EXISTS idx_${tableName}_created ON ${tableName}(created_at)`,
+      );
+
+      await this.client.execute(
+        `CREATE INDEX IF NOT EXISTS idx_${tableName}_dimensions ON ${tableName}(dimensions)`,
+      );
+
+      this.initialized = true;
+      this.logger.debug("Vector adapter initialized");
+    } catch (error) {
+      this.logger.error("Failed to initialize vector adapter", error as Error);
+      throw error;
+    }
+  }
+
+  /**
+   * Execute a database operation with retries
+   */
+  protected async executeWithRetry<T>(operation: () => Promise<T>, context: string): Promise<T> {
+    let lastError: Error | undefined;
+    let delay = this.retryDelayMs;
+
+    for (let attempt = 1; attempt <= this.maxRetries; attempt++) {
+      try {
+        return await operation();
+      } catch (error) {
+        lastError = error as Error;
+        this.logger.warn(`Operation failed (attempt ${attempt}): ${context}`, error as Error);
+
+        if (attempt < this.maxRetries) {
+          await new Promise((resolve) => setTimeout(resolve, delay));
+          delay *= 2;
+        }
+      }
+    }
+
+    this.logger.error(`Operation failed after ${this.maxRetries} attempts: ${context}`, lastError);
+    throw lastError;
+  }
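The `serializeVector`/`deserializeVector` pair above defines the on-disk BLOB layout: consecutive little-endian float32 values. A self-contained round-trip sketch of that layout, independent of the adapter:

```ts
// Round-trip sketch of the little-endian float32 BLOB layout used by the core.
function encodeF32LE(vector: number[]): Uint8Array {
  const buffer = new ArrayBuffer(vector.length * 4);
  const view = new DataView(buffer);
  vector.forEach((value, i) => view.setFloat32(i * 4, value, true)); // true = little-endian
  return new Uint8Array(buffer);
}

function decodeF32LE(bytes: Uint8Array): number[] {
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const out: number[] = [];
  for (let i = 0; i < bytes.byteLength; i += 4) {
    out.push(view.getFloat32(i, true));
  }
  return out;
}

// Values exactly representable in float32 survive the round trip unchanged.
console.log(decodeF32LE(encodeF32LE([0.25, -1.5, 3]))); // [ 0.25, -1.5, 3 ]
```

Because the Node.js adapter's Buffer override writes the same layout (`writeFloatLE`/`readFloatLE`), BLOBs written in one runtime remain readable in the other.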
non-empty array"); + } + + if (vector.length > this.maxVectorDimensions) { + throw new Error( + `Vector dimensions (${vector.length}) exceed maximum (${this.maxVectorDimensions})`, + ); + } + + if (this.dimensions === null) { + this.dimensions = vector.length; + } else if (vector.length !== this.dimensions) { + throw new Error( + `Vector dimension mismatch. Expected ${this.dimensions}, got ${vector.length}`, + ); + } + + const tableName = `${this.tablePrefix}_vectors`; + const serializedVector = this.serializeVector(vector); + const metadataJson = metadata ? safeStringify(metadata) : null; + + await this.executeWithRetry(async () => { + await this.client.execute({ + sql: ` + INSERT OR REPLACE INTO ${tableName} + (id, vector, dimensions, metadata, updated_at) + VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP) + `, + args: [id, serializedVector, vector.length, metadataJson], + }); + }, `store vector ${id}`); + + if (this.vectorCache.size >= this.cacheSize) { + const firstKey = this.vectorCache.keys().next().value; + if (firstKey) this.vectorCache.delete(firstKey); + } + this.vectorCache.set(id, { id, vector, metadata }); + + this.logger.debug(`Vector stored: ${id} (${vector.length} dimensions)`); + } + + async storeBatch(items: VectorItem[]): Promise { + await this.initialize(); + + if (items.length === 0) return; + + const tableName = `${this.tablePrefix}_vectors`; + + for (let i = 0; i < items.length; i += this.batchSize) { + const batch = items.slice(i, i + this.batchSize); + + await this.executeWithRetry(async () => { + const stmts: { sql: string; args: any[] }[] = []; + for (const item of batch) { + if (!Array.isArray(item.vector) || item.vector.length === 0) { + throw new Error("Vector must be a non-empty array"); + } + if (this.dimensions === null) { + this.dimensions = item.vector.length; + } else if (item.vector.length !== this.dimensions) { + throw new Error( + `Vector dimension mismatch. Expected ${this.dimensions}, got ${item.vector.length}`, + ); + } + + const serializedVector = this.serializeVector(item.vector); + const metadataJson = item.metadata ? safeStringify(item.metadata) : null; + const content = item.content ?? null; + stmts.push({ + sql: `INSERT OR REPLACE INTO ${tableName} (id, vector, dimensions, metadata, content, updated_at) VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)`, + args: [item.id, serializedVector, item.vector.length, metadataJson, content], + }); + } + await this.client.batch(stmts, "write"); + }, `storeBatch ${batch.length} vectors`); + + this.logger.debug(`Batch of ${batch.length} vectors stored`); + } + } + + async search(queryVector: number[], options?: VectorSearchOptions): Promise { + await this.initialize(); + + const { limit = 10, threshold = 0, filter } = options || {}; + + if (this.dimensions !== null && queryVector.length !== this.dimensions) { + throw new Error( + `Query vector dimension mismatch. 
+  async search(queryVector: number[], options?: VectorSearchOptions): Promise<SearchResult[]> {
+    await this.initialize();
+
+    const { limit = 10, threshold = 0, filter } = options || {};
+
+    if (this.dimensions !== null && queryVector.length !== this.dimensions) {
+      throw new Error(
+        `Query vector dimension mismatch. Expected ${this.dimensions}, got ${queryVector.length}`,
+      );
+    }
+
+    const tableName = `${this.tablePrefix}_vectors`;
+
+    let query = `SELECT id, vector, dimensions, metadata, content FROM ${tableName}`;
+    const args: any[] = [];
+
+    if (this.dimensions !== null) {
+      query += " WHERE dimensions = ?";
+      args.push(this.dimensions);
+    }
+
+    const result = await this.executeWithRetry(
+      async () => await this.client.execute({ sql: query, args }),
+      "search vectors",
+    );
+
+    const searchResults: SearchResult[] = [];
+
+    for (const row of result.rows) {
+      const id = row.id as string;
+      const vectorBlob = row.vector as Uint8Array | ArrayBuffer;
+      const metadataJson = row.metadata as string | null;
+      const content = (row.content as string | null) ?? undefined;
+
+      const metadata = metadataJson ? JSON.parse(metadataJson) : undefined;
+
+      if (filter && !this.matchesFilter(metadata, filter)) {
+        continue;
+      }
+
+      const vector = this.deserializeVector(vectorBlob);
+      const similarity = cosineSimilarity(queryVector, vector);
+      const score = (similarity + 1) / 2;
+
+      if (score >= threshold) {
+        searchResults.push({
+          id,
+          vector,
+          metadata,
+          content,
+          score,
+          distance: 1 - similarity,
+        });
+      }
+    }
+
+    searchResults.sort((a, b) => b.score - a.score);
+
+    return searchResults.slice(0, limit);
+  }
+
+  private matchesFilter(
+    metadata: Record<string, unknown> | undefined,
+    filter: Record<string, unknown>,
+  ): boolean {
+    if (!metadata) {
+      return false;
+    }
+
+    for (const [key, value] of Object.entries(filter)) {
+      if (metadata[key] !== value) {
+        return false;
+      }
+    }
+
+    return true;
+  }
+
+  async delete(id: string): Promise<void> {
+    await this.initialize();
+
+    const tableName = `${this.tablePrefix}_vectors`;
+
+    await this.executeWithRetry(async () => {
+      await this.client.execute({
+        sql: `DELETE FROM ${tableName} WHERE id = ?`,
+        args: [id],
+      });
+    }, `delete vector ${id}`);
+
+    this.vectorCache.delete(id);
+
+    this.logger.debug(`Vector deleted: ${id}`);
+  }
+
+  async deleteBatch(ids: string[]): Promise<void> {
+    await this.initialize();
+
+    if (ids.length === 0) return;
+
+    const tableName = `${this.tablePrefix}_vectors`;
+
+    for (let i = 0; i < ids.length; i += this.batchSize) {
+      const batch = ids.slice(i, i + this.batchSize);
+      const placeholders = batch.map(() => "?").join(",");
+
+      await this.executeWithRetry(async () => {
+        await this.client.execute({
+          sql: `DELETE FROM ${tableName} WHERE id IN (${placeholders})`,
+          args: batch,
+        });
+      }, `deleteBatch ${batch.length} vectors`);
+
+      for (const id of batch) {
+        this.vectorCache.delete(id);
+      }
+
+      this.logger.debug(`Batch of ${batch.length} vectors deleted`);
+    }
+  }
+
+  async clear(): Promise<void> {
+    await this.initialize();
+
+    const tableName = `${this.tablePrefix}_vectors`;
+
+    await this.executeWithRetry(async () => {
+      await this.client.execute(`DELETE FROM ${tableName}`);
+    }, "clear all vectors");
+
+    this.vectorCache.clear();
+    this.dimensions = null;
+
+    this.logger.debug("All vectors cleared");
+  }
+
+  async count(): Promise<number> {
+    await this.initialize();
+
+    const tableName = `${this.tablePrefix}_vectors`;
+
+    const result = await this.executeWithRetry(
+      async () => await this.client.execute(`SELECT COUNT(*) as count FROM ${tableName}`),
+      "count vectors",
+    );
+
+    const raw = result.rows[0]?.count as any;
+    if (typeof raw === "bigint") return Number(raw);
+    if (typeof raw === "string") return Number.parseInt(raw, 10) || 0;
+    return (raw as number) ??
0; + } + + async get(id: string): Promise { + await this.initialize(); + + if (this.vectorCache.has(id)) { + const cached = this.vectorCache.get(id); + if (cached) { + return { + ...cached, + vector: [...cached.vector], + metadata: cached.metadata ? { ...cached.metadata } : undefined, + }; + } + } + + const tableName = `${this.tablePrefix}_vectors`; + + const result = await this.executeWithRetry( + async () => + await this.client.execute({ + sql: `SELECT id, vector, metadata, content FROM ${tableName} WHERE id = ?`, + args: [id], + }), + `get vector ${id}`, + ); + + if (result.rows.length === 0) { + return null; + } + + const row = result.rows[0]; + const vectorBlob = row.vector as Uint8Array | ArrayBuffer; + const metadataJson = row.metadata as string | null; + const content = row.content as string | null; + + const vector = this.deserializeVector(vectorBlob); + const metadata = metadataJson ? JSON.parse(metadataJson) : undefined; + + const item: VectorItem = { + id, + vector, + metadata, + content: content ?? undefined, + }; + + if (this.vectorCache.size >= this.cacheSize) { + const firstKey = this.vectorCache.keys().next().value; + if (firstKey) this.vectorCache.delete(firstKey); + } + this.vectorCache.set(id, item); + + return item; + } + + async close(): Promise { + this.vectorCache.clear(); + this.logger.debug("Vector adapter closed"); + } + + async getStats(): Promise<{ + count: number; + dimensions: number | null; + cacheSize: number; + tableSizeBytes: number; + }> { + await this.initialize(); + + const tableName = `${this.tablePrefix}_vectors`; + + const [countResult, sizeResult] = await Promise.all([ + this.executeWithRetry( + async () => + await this.client.execute( + `SELECT COUNT(*) as count, MAX(dimensions) as dims FROM ${tableName}`, + ), + "getStats count", + ), + this.executeWithRetry( + async () => + await this.client.execute({ + sql: `SELECT + COALESCE(SUM(LENGTH(id)),0) + + COALESCE(SUM(LENGTH(vector)),0) + + COALESCE(SUM(LENGTH(metadata)),0) + + COALESCE(SUM(LENGTH(content)),0) AS size + FROM ${tableName}`, + }), + "getStats size", + ), + ]); + + const row1 = countResult.rows[0] as any; + const row2 = sizeResult.rows[0] as any; + + const countRaw = row1?.count as any; + const dimsRaw = row1?.dims as any; + const sizeRaw = row2?.size as any; + + const normalize = (v: any): number => + typeof v === "bigint" + ? Number(v) + : typeof v === "string" + ? Number.parseInt(v, 10) || 0 + : (v ?? 0); + + return { + count: normalize(countRaw), + dimensions: dimsRaw != null ? normalize(dimsRaw) : this.dimensions, + cacheSize: this.vectorCache.size, + tableSizeBytes: normalize(sizeRaw), + }; + } +} diff --git a/packages/libsql/tsup.config.ts b/packages/libsql/tsup.config.ts index 0819104fd..52ba7850f 100644 --- a/packages/libsql/tsup.config.ts +++ b/packages/libsql/tsup.config.ts @@ -2,7 +2,7 @@ import { defineConfig } from "tsup"; import { markAsExternalPlugin } from "../shared/tsup-plugins/mark-as-external"; export default defineConfig({ - entry: ["src/index.ts"], + entry: ["src/index.ts", "src/edge.ts"], format: ["cjs", "esm"], splitting: false, sourcemap: true, diff --git a/website/deployment-docs/cloudflare-workers.md b/website/deployment-docs/cloudflare-workers.md index 261039886..eb5f101f7 100644 --- a/website/deployment-docs/cloudflare-workers.md +++ b/website/deployment-docs/cloudflare-workers.md @@ -57,7 +57,6 @@ type Env = { VOLTAGENT_SECRET_KEY?: string; }; -// LibSQL is not supported on Cloudflare Workers. Use InMemory or Postgres/Supabase instead. 
const memory = new Memory({ storage: new InMemoryStorageAdapter(), }); @@ -130,7 +129,6 @@ curl https://.workers.dev/ ## Feature limitations on serverless (edge) - **MCP client/server** are not available on serverless runtimes today. The current MCP implementation depends on Node.js stdio/network APIs. Run MCP providers on a Node deployment instead. -- **libSQL memory adapter** is not supported in Workers. The libSQL driver requires Node sockets. Use the bundled `InMemoryStorageAdapter` or connect to an external database (PostgreSQL/Supabase) via their HTTP clients. import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; @@ -196,6 +194,29 @@ const memory = new Memory({ }), }); +const agent = new Agent({ + name: "serverless-assistant", + instructions: "Answer user questions quickly.", + model: openai("gpt-4o-mini"), + tools: [weatherTool], + memory, +}); +``` + + + + +```ts +import { Memory } from "@voltagent/core"; +import { LibSQLMemoryAdapter } from "@voltagent/libsql/edge"; + +const memory = new Memory({ + storage: new LibSQLMemoryAdapter({ + url: env.TURSO_URL, // libsql://your-db.turso.io + authToken: env.TURSO_AUTH_TOKEN, + }), +}); + const agent = new Agent({ name: "serverless-assistant", instructions: "Answer user questions quickly.", From f8cdfb519dc3c5999ba38a8c46e7a9d51b7da4d6 Mon Sep 17 00:00:00 2001 From: Nik Date: Wed, 24 Dec 2025 11:16:03 +0100 Subject: [PATCH 2/2] fix(libsql): use debug option in edge memory adapter logger --- packages/libsql/src/memory-v2-adapter-edge.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/libsql/src/memory-v2-adapter-edge.ts b/packages/libsql/src/memory-v2-adapter-edge.ts index e3b387b41..7fcd3b100 100644 --- a/packages/libsql/src/memory-v2-adapter-edge.ts +++ b/packages/libsql/src/memory-v2-adapter-edge.ts @@ -69,7 +69,7 @@ export class LibSQLMemoryAdapterEdge extends LibSQLMemoryCore { const logger = options.logger || AgentRegistry.getInstance().getGlobalLogger() || - createPinoLogger({ name: "libsql-memory-edge" }); + createPinoLogger({ name: "libsql-memory-edge", level: options.debug ? "debug" : "info" }); // Create LibSQL client using web-compatible import const client = createClient({