Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions examples/with-lancedb/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
OPENAI_API_KEY=your_openai_api_key_here
# Optional: LanceDB Cloud URI (e.g. lancedb+cloud://...) or a local path.
# Defaults to .voltagent/lancedb when unset.
# LANCEDB_URI=.voltagent/lancedb
5 changes: 5 additions & 0 deletions examples/with-lancedb/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
node_modules
dist
.env
.voltagent
.DS_Store
51 changes: 51 additions & 0 deletions examples/with-lancedb/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
# VoltAgent with LanceDB Example

This example demonstrates how to use [LanceDB](https://lancedb.github.io/lancedb/) as a vector database/retriever within a VoltAgent application.

## Features

- **Local & Serverless**: Uses [LanceDB](https://lancedb.github.io/lancedb/) which runs embedded locally—no Docker or database API keys required (unless using LanceDB Cloud). An OpenAI API key is still needed for embeddings and the LLM.
- **Multimodal Ready**: LanceDB is optimized for multimodal data (text, images, video), making this a future-proof foundation.
- **Automatic Initialization**: Automatically creates the knowledge base table and populates it with sample data on first run.
- **Semantic Search**: Uses OpenAI embeddings to retrieve relevant documents based on user queries.
- **Two Agent Patterns**:
1. **Assistant with Retriever**: Automatically uses retrieved context for every message.
2. **Assistant with Tools**: Autonomously decides when to use the retrieval tool.

## Prerequisites

- Node.js 20+
- OpenAI API Key (for embeddings and LLM)

## Getting Started

1. **Install dependencies**:

```bash
npm install
```

2. **Configure Environment**:
Copy `.env.example` to `.env` and add your OpenAI API Key:

```bash
cp .env.example .env
```

Edit `.env`:

```env
OPENAI_API_KEY=sk-...
```

3. **Run the Agent**:
```bash
npm run dev
```

## How It Works

- The database is stored locally in `.voltagent/lancedb`.
- On startup, `src/retriever/index.ts` checks if the table exists.
- If not, it creates it and indexes the sample documents defined in the code.
- Agents can then query this local database with low latency.
32 changes: 32 additions & 0 deletions examples/with-lancedb/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
{
"name": "voltagent-example-with-lancedb",
"version": "1.0.0",
"description": "Example demonstrating VoltAgent integration with LanceDB for serverless vector storage",
"private": true,
"main": "dist/index.js",
"type": "module",
"scripts": {
"build": "tsc",
"dev": "tsx watch --env-file=.env ./src/index.ts",
"start": "node dist/index.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@types/node": "^24.6.2",
"tsx": "^4.20.4",
"typescript": "^5.9.3"
},
"dependencies": {
"@ai-sdk/openai": "^3.0.1",
"@lancedb/lancedb": "^0.23.0",
"@voltagent/core": "workspace:^",
"@voltagent/libsql": "workspace:^",
"@voltagent/logger": "workspace:^",
"@voltagent/server-hono": "workspace:^",
"ai": "^6.0.0",
"openai": "^6.15.0",
"zod": "^3.25.76"
}
}
53 changes: 53 additions & 0 deletions examples/with-lancedb/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import { openai } from "@ai-sdk/openai";
import { Agent, Memory, VoltAgent } from "@voltagent/core";
import { LibSQLMemoryAdapter } from "@voltagent/libsql";
import { createPinoLogger } from "@voltagent/logger";
import { honoServer } from "@voltagent/server-hono";

import { retriever } from "./retriever/index.js";

// Create logger
const logger = createPinoLogger({
name: "with-lancedb",
level: "info",
});

// Create LibSQL storage for persistent memory
const memory = new Memory({
storage: new LibSQLMemoryAdapter({
url: "file:./.voltagent/memory.db",
}),
});

// Agent 1: Automatic Retrieval
const agentWithRetriever = new Agent({
name: "Assistant with Retriever",
instructions:
"You are a helpful assistant. You have access to a knowledge base about VoltAgent and LanceDB. You automatically retrieve relevant information to answer user questions.",
model: openai("gpt-4o-mini"),
retriever: retriever,
memory,
});

// Agent 2: Tool-based Retrieval
const agentWithTools = new Agent({
name: "Assistant with Tools",
instructions:
"You represent a helpful assistant that can search the knowledge base using tools. Decide when to search based on the user's question.",
model: openai("gpt-4o-mini"),
tools: [retriever.tool],
memory,
});

// Initialize VoltAgent
new VoltAgent({
agents: {
agentWithRetriever,
agentWithTools,
},
logger,
server: honoServer({ port: 3000 }),
});

console.log("🚀 VoltAgent with LanceDB is running!");
console.log("Try asking: 'What is VoltAgent?' or 'Tell me about LanceDB'");
173 changes: 173 additions & 0 deletions examples/with-lancedb/src/retriever/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
import fs from "node:fs/promises";
import path from "node:path";
import { openai } from "@ai-sdk/openai";
import { type Connection, type Table, connect } from "@lancedb/lancedb";
import { type BaseMessage, BaseRetriever, type RetrieveOptions } from "@voltagent/core";
import { embed } from "ai";

// Name of the LanceDB table that backs the knowledge base.
const tableName = "voltagent-knowledge-base";
// Database location: LANCEDB_URI env var (local path or remote URI), falling
// back to a local `.voltagent/lancedb` directory under the working directory.
const dbUri = process.env.LANCEDB_URI || path.resolve(process.cwd(), ".voltagent/lancedb");

// Seed documents indexed on first run, when the table does not exist yet.
// Metadata fields are flattened into top-level table columns at insert time.
const sampleDocuments = [
  {
    text: "LanceDB is a developer-friendly, serverless vector database for AI applications.",
    metadata: {
      category: "database",
      source: "documentation",
      title: "What is LanceDB",
    },
  },
  {
    text: "VoltAgent is an open-source TypeScript framework for building AI agents.",
    metadata: {
      category: "framework",
      source: "documentation",
      title: "What is VoltAgent",
    },
  },
  {
    text: "Vector embeddings capture semantic meaning in high-dimensional space.",
    metadata: {
      category: "concept",
      source: "documentation",
      title: "Vector Embeddings",
    },
  },
];

// Lazily-initialized connection handles, populated by initializeIndex().
let db: Connection | null = null;
let table: Table | null = null;

/**
 * Embed a piece of text with OpenAI's `text-embedding-3-small` model.
 *
 * @param text Raw text to embed.
 * @returns The embedding vector as a plain number array.
 */
async function getEmbedding(text: string): Promise<number[]> {
  const result = await embed({
    model: openai.embedding("text-embedding-3-small"),
    value: text,
  });
  return result.embedding;
}

/**
 * Ensure the parent directory of a local database path exists.
 *
 * Skipped for any URI with an explicit scheme (e.g. `lancedb+cloud://`,
 * `db://`, `s3://`) — those are remote targets, not filesystem paths. The
 * original check only matched the `lancedb+` prefix; the scheme test below
 * covers every remote form while remaining backward-compatible.
 * `mkdir` with `recursive: true` is idempotent, so an already-existing
 * directory is not an error.
 */
async function ensureDbDir() {
  // Any "<scheme>://" prefix means a remote URI — nothing to create locally.
  if (/^[a-z][a-z0-9+.-]*:\/\//i.test(dbUri)) {
    return;
  }
  try {
    await fs.mkdir(path.dirname(dbUri), { recursive: true });
  } catch (error) {
    // Best-effort: a real problem will surface later when connect() runs.
    console.warn(`Could not create directory for ${dbUri}:`, error);
  }
}

/**
 * Connect to LanceDB and make sure the knowledge-base table exists.
 *
 * If the table is already present it is simply opened; otherwise each sample
 * document is embedded via OpenAI and the table is created from the results.
 * Errors are logged rather than thrown so the app can still start; retrieve()
 * then reports an uninitialized knowledge base.
 */
async function initializeIndex() {
  try {
    await ensureDbDir();

    db = await connect(dbUri);
    console.log(`Connected to LanceDB at ${dbUri}`);

    const tableNames = await db.tableNames();

    if (tableNames.includes(tableName)) {
      // Reuse the existing table — avoids re-embedding on every startup.
      table = await db.openTable(tableName);
      const count = await table.countRows();
      console.log(`📋 Table "${tableName}" exists with ${count} records`);
    } else {
      console.log(`📋 Creating new table "${tableName}"...`);
      console.log("📚 Generating embeddings for sample documents...");

      const recordsWithEmbeddings = [];

      // Embed sequentially; a failure for one document is logged and skipped
      // so the remaining documents can still be indexed.
      for (const doc of sampleDocuments) {
        try {
          const vector = await getEmbedding(doc.text);
          // Flatten metadata into top-level columns alongside the vector.
          recordsWithEmbeddings.push({
            text: doc.text,
            ...doc.metadata,
            vector,
          });
        } catch (error) {
          console.error(`Error generating embedding for "${doc.metadata.title}":`, error);
        }
      }

      if (recordsWithEmbeddings.length > 0) {
        // Create table with sample data; schema is inferred from the records.
        table = await db.createTable(tableName, recordsWithEmbeddings);
        console.log(`✅ Table "${tableName}" created with ${recordsWithEmbeddings.length} records`);
      } else {
        console.warn("⚠️ No embeddings generated. Table not created.");
      }
    }
  } catch (error) {
    console.error("Error initializing LanceDB:", error);
  }
}

// Kick off initialization at import time; retrieve() awaits this promise.
const initPromise = initializeIndex();

export class LanceDBRetriever extends BaseRetriever {
  /**
   * Retrieve documents from LanceDB based on semantic similarity.
   *
   * @param input Either a raw query string or the conversation messages; for
   *   messages, the text of the last message is used as the query.
   * @param options Retrieval options; when `options.context` is present the
   *   matched documents are recorded under the "references" key.
   * @returns A formatted block of matching documents for the LLM, or a
   *   human-readable status message when nothing can be retrieved.
   */
  async retrieve(input: string | BaseMessage[], options: RetrieveOptions): Promise<string> {
    // Wait for the module-level initialization kicked off at import time.
    if (!table) {
      await initPromise;
      if (!table) return "Knowledge base is not initialized yet.";
    }

    // Normalize the input into a single query string.
    let searchText = "";
    if (typeof input === "string") {
      searchText = input;
    } else if (Array.isArray(input) && input.length > 0) {
      const lastMessage = input[input.length - 1];
      if (Array.isArray(lastMessage.content)) {
        // Multi-part message: concatenate only the text parts.
        const textParts = lastMessage.content
          .filter((part: any) => part.type === "text")
          .map((part: any) => part.text);
        searchText = textParts.join(" ");
      } else {
        searchText = lastMessage.content as string;
      }
    }

    // Guard: don't send an empty query to the embeddings API.
    if (!searchText.trim()) {
      return "No query text provided.";
    }

    try {
      const queryVector = await getEmbedding(searchText);

      // Perform vector search.
      // Default metric is L2 (Euclidean). For normalized embeddings (like
      // OpenAI's), L2 ranking is equivalent to cosine-distance ranking.
      const results = await table.vectorSearch(queryVector).limit(3).toArray();

      // Expose the matched sources to the caller via the shared context.
      if (options.context && results.length > 0) {
        const references = results.map((doc: any, index: number) => ({
          id: `ref-${index}`,
          title: doc.title || `Document ${index + 1}`,
          source: "LanceDB",
          category: doc.category,
          score: doc._distance, // LanceDB returns distance (lower = closer)
        }));
        options.context.set("references", references);
      }

      if (results.length === 0) {
        return "No relevant documents found.";
      }

      // Format results for the LLM.
      return results
        .map(
          (doc: any, index: number) =>
            `Document ${index + 1} (Title: ${doc.title}, Category: ${doc.category}):\n${doc.text}`,
        )
        .join("\n\n---\n\n");
    } catch (error) {
      console.error("Error retrieving documents from LanceDB:", error);
      return "Error retrieving documents.";
    }
  }
}

// Shared singleton used by both example agents.
export const retriever = new LanceDBRetriever();
13 changes: 13 additions & 0 deletions examples/with-lancedb/tsconfig.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./dist",
"noEmit": false,
"composite": false,
"incremental": true,
"tsBuildInfoFile": "./dist/.tsbuildinfo"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}
Loading