This repository was archived by the owner on May 16, 2025. It is now read-only.

Commit b33a2de

fix tests, add example (not working yet)
1 parent c193c20 commit b33a2de

File tree

6 files changed, +509 -11 lines changed

.gitignore

Lines changed: 0 additions & 1 deletion

@@ -5,4 +5,3 @@ index.d.cts
 node_modules
 dist
 .yarn
-.env
calculator_server_shttp_sse.ts

Lines changed: 173 additions & 0 deletions

@@ -0,0 +1,173 @@
import express from "express";

import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { randomUUID } from "node:crypto";
import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
import { isInitializeRequest } from "@modelcontextprotocol/sdk/types.js";
import { z } from "zod";

export async function main() {
  const server = new McpServer({
    name: "backwards-compatible-server",
    version: "1.0.0",
  });

  const calcSchema = { a: z.number(), b: z.number() };

  server.tool(
    "add",
    "Adds two numbers together",
    calcSchema,
    async ({ a, b }: { a: number; b: number }, extra) => {
      return {
        content: [{ type: "text", text: `${a + b}` }],
      };
    }
  );

  server.tool(
    "subtract",
    "Subtracts two numbers",
    calcSchema,
    async ({ a, b }: { a: number; b: number }, extra) => {
      return { content: [{ type: "text", text: `${a - b}` }] };
    }
  );

  server.tool(
    "multiply",
    "Multiplies two numbers",
    calcSchema,
    async ({ a, b }: { a: number; b: number }, extra) => {
      return { content: [{ type: "text", text: `${a * b}` }] };
    }
  );

  server.tool(
    "divide",
    "Divides two numbers",
    calcSchema,
    async ({ a, b }: { a: number; b: number }, extra) => {
      return { content: [{ type: "text", text: `${a / b}` }] };
    }
  );

  const app = express();
  app.use(express.json());

  // Store transports for each session type
  const transports = {
    streamable: {} as Record<string, StreamableHTTPServerTransport>,
    sse: {} as Record<string, SSEServerTransport>,
  };

  // Modern Streamable HTTP endpoint
  app.post("/mcp", async (req, res) => {
    // Check for existing session ID
    const sessionId = req.headers["mcp-session-id"] as string | undefined;
    let transport: StreamableHTTPServerTransport;

    if (sessionId && transports.streamable[sessionId]) {
      // Reuse existing transport
      transport = transports.streamable[sessionId];
    } else if (!sessionId && isInitializeRequest(req.body)) {
      // New initialization request
      transport = new StreamableHTTPServerTransport({
        sessionIdGenerator: () => randomUUID(),
        onsessioninitialized: (sessionId) => {
          // Store the transport by session ID
          transports.streamable[sessionId] = transport;
        },
      });

      // Clean up transport when closed
      transport.onclose = () => {
        if (transport.sessionId) {
          delete transports.streamable[transport.sessionId];
        }
      };

      // Connect to the MCP server
      await server.connect(transport);
    } else {
      // Invalid request
      console.error(
        "Invalid Streamable HTTP request: ",
        JSON.stringify(req.body, null, 2)
      );
      res.status(400).json({
        jsonrpc: "2.0",
        error: {
          code: -32000,
          message: "Bad Request: No valid session ID provided",
        },
        id: null,
      });
      return;
    }

    // Handle the request
    await transport.handleRequest(req, res, req.body);
  });

  // Reusable handler for GET and DELETE requests
  const handleSessionRequest = async (
    req: express.Request,
    res: express.Response
  ) => {
    const sessionId = req.headers["mcp-session-id"] as string | undefined;
    if (!sessionId || !transports.streamable[sessionId]) {
      console.error(
        "Invalid Streamable HTTP request (invalid/missing session ID): ",
        JSON.stringify(req.body, null, 2)
      );
      res.status(400).send("Invalid or missing session ID");
      return;
    }

    const transport = transports.streamable[sessionId];
    await transport.handleRequest(req, res);
  };

  app.get("/mcp", handleSessionRequest);
  app.delete("/mcp", handleSessionRequest);

  // Legacy SSE endpoint for older clients
  app.get("/sse", async (req, res) => {
    // Create SSE transport for legacy clients
    const transport = new SSEServerTransport("/messages", res);
    transports.sse[transport.sessionId] = transport;

    res.on("close", () => {
      delete transports.sse[transport.sessionId];
    });

    await server.connect(transport);
  });

  // Legacy message endpoint for older clients
  app.post("/messages", async (req, res) => {
    const sessionId = req.query.sessionId as string;
    const transport = transports.sse[sessionId];
    if (transport) {
      await transport.handlePostMessage(req, res, req.body);
    } else {
      console.error("No transport found for sessionId", sessionId);
      res.status(400).send("No transport found for sessionId");
    }
  });

  app.listen(3000);
}

if (typeof require !== "undefined" && require.main === module) {
  main().catch(console.error);
}

if (
  import.meta.url === process.argv[1] ||
  import.meta.url === `file://${process.argv[1]}`
) {
  main().catch(console.error);
}
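
The server above keeps both transports registered at once, so a compatible client can try the modern Streamable HTTP endpoint first and fall back to the legacy SSE pair. The sketch below is not part of this commit; it assumes the standard @modelcontextprotocol/sdk client classes (Client, StreamableHTTPClientTransport, SSEClientTransport) and the port-3000 routes defined above.

// Client-side sketch (not in this commit): prefer /mcp, fall back to /sse.
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";

async function connectWithFallback(baseUrl: string): Promise<Client> {
  try {
    // Modern clients open a session by POSTing an initialize request to /mcp.
    const client = new Client({ name: "calculator-client", version: "1.0.0" });
    await client.connect(
      new StreamableHTTPClientTransport(new URL(`${baseUrl}/mcp`))
    );
    return client;
  } catch {
    // Older clients only understand the GET /sse + POST /messages pair.
    const client = new Client({ name: "calculator-client-sse", version: "1.0.0" });
    await client.connect(new SSEClientTransport(new URL(`${baseUrl}/sse`)));
    return client;
  }
}

// Example usage:
//   const client = await connectWithFallback("http://localhost:3000");
//   const sum = await client.callTool({ name: "add", arguments: { a: 1, b: 2 } });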
Lines changed: 207 additions & 0 deletions
@@ -0,0 +1,207 @@
/**
 * Calculator MCP Server with LangGraph Example
 *
 * This example demonstrates how to use the Calculator MCP server with LangGraph
 * to create a structured workflow for simple calculations.
 *
 * The graph-based approach allows:
 * 1. Clear separation of responsibilities (reasoning vs execution)
 * 2. Conditional routing based on tool calls
 * 3. Structured handling of complex multi-tool operations
 */

/* eslint-disable no-console */
import { ChatOpenAI } from "@langchain/openai";
import {
  StateGraph,
  END,
  START,
  MessagesAnnotation,
} from "@langchain/langgraph";
import { ToolNode } from "@langchain/langgraph/prebuilt";
import {
  HumanMessage,
  AIMessage,
  SystemMessage,
  isHumanMessage,
} from "@langchain/core/messages";
import dotenv from "dotenv";
import fs from "fs";
import path from "path";

import { main as calculatorServerMain } from "./calculator_server_shttp_sse.js";

// MCP client imports
import { MultiServerMCPClient } from "../src/index.js";

// Load environment variables from .env file
dotenv.config();

const transportType =
  process.env.MCP_TRANSPORT_TYPE === "sse" ? "sse" : "streamable";

export async function runExample(client?: MultiServerMCPClient) {
  try {
    console.log("Initializing MCP client...");

    void calculatorServerMain();

    // Wait for the server to start
    await new Promise((resolve) => {
      setTimeout(resolve, 100);
    });

    // Create a client with configurations for the calculator server
    // eslint-disable-next-line no-param-reassign
    client =
      client ??
      new MultiServerMCPClient({
        calculator: {
          url: `http://localhost:3000/${
            transportType === "sse" ? "sse" : "mcp"
          }`,
        },
      });

    console.log("Connected to server");

    // Get all tools (flattened array is the default now)
    const mcpTools = await client.getTools();

    if (mcpTools.length === 0) {
      throw new Error("No tools found");
    }

    console.log(
      `Loaded ${mcpTools.length} MCP tools: ${mcpTools
        .map((tool) => tool.name)
        .join(", ")}`
    );

    // Create an OpenAI model with tools attached
    const systemMessage = `You are an assistant that helps users with calculations.
You have access to tools that can add, subtract, multiply, and divide numbers. Use
these tools to answer the user's questions.`;

    const model = new ChatOpenAI({
      modelName: process.env.OPENAI_MODEL_NAME || "gpt-4o-mini",
      temperature: 0.7,
    }).bindTools(mcpTools);

    // Create a tool node for the LangGraph
    const toolNode = new ToolNode(mcpTools);

    // ================================================
    // Create a LangGraph agent flow
    // ================================================
    console.log("\n=== CREATING LANGGRAPH AGENT FLOW ===");

    // Define the function that calls the model
    const llmNode = async (state: typeof MessagesAnnotation.State) => {
      console.log(`Calling LLM with ${state.messages.length} messages`);

      // Add system message if it's the first call
      let { messages } = state;
      if (messages.length === 1 && isHumanMessage(messages[0])) {
        messages = [new SystemMessage(systemMessage), ...messages];
      }

      const response = await model.invoke(messages);
      return { messages: [response] };
    };

    // Create a new graph with MessagesAnnotation
    const workflow = new StateGraph(MessagesAnnotation)

      // Add the nodes to the graph
      .addNode("llm", llmNode)
      .addNode("tools", toolNode)

      // Add edges - these define how nodes are connected
      .addEdge(START, "llm")
      .addEdge("tools", "llm")

      // Conditional routing to end or continue the tool loop
      .addConditionalEdges("llm", (state) => {
        const lastMessage = state.messages[state.messages.length - 1];

        // Cast to AIMessage to access tool_calls property
        const aiMessage = lastMessage as AIMessage;
        if (aiMessage.tool_calls && aiMessage.tool_calls.length > 0) {
          console.log("Tool calls detected, routing to tools node");

          // Log what tools are being called
          const toolNames = aiMessage.tool_calls
            .map((tc) => tc.name)
            .join(", ");
          console.log(`Tools being called: ${toolNames}`);

          return "tools";
        }

        // If there are no tool calls, we're done
        console.log("No tool calls, ending the workflow");
        return END;
      });

    // Compile the graph
    const app = workflow.compile();

    // Define examples to run
    const examples = [
      {
        name: "Add 1 and 2",
        query: "What is 1 + 2?",
      },
      {
        name: "Subtract 1 from 2",
        query: "What is 2 - 1?",
      },
      {
        name: "Multiply 1 and 2",
        query: "What is 1 * 2?",
      },
      {
        name: "Divide 1 by 2",
        query: "What is 1 / 2?",
      },
    ];

    // Run the examples
    console.log("\n=== RUNNING LANGGRAPH AGENT ===");

    for (const example of examples) {
      console.log(`\n--- Example: ${example.name} ---`);
      console.log(`Query: ${example.query}`);

      // Run the LangGraph agent
      const result = await app.invoke({
        messages: [new HumanMessage(example.query)],
      });

      // Display the final answer
      const finalMessage = result.messages[result.messages.length - 1];
      console.log(`\nResult: ${finalMessage.content}`);
    }
  } catch (error) {
    console.error("Error:", error);
    process.exit(1); // Exit with error code
  } finally {
    if (client) {
      await client.close();
      console.log("Closed all MCP connections");
    }

    // Exit process after a short delay to allow for cleanup
    setTimeout(() => {
      console.log("Example completed, exiting process.");
      process.exit(0);
    }, 500);
  }
}

const isMainModule = import.meta.url === `file://${process.argv[1]}`;

if (isMainModule) {
  runExample().catch((error) => console.error("Setup error:", error));
}
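
Because runExample accepts an optional MultiServerMCPClient, a test or a manual run against the legacy transport can inject its own client instead of relying on the MCP_TRANSPORT_TYPE environment variable. A minimal sketch, assuming the same localhost:3000 server; the module name used for the example file is hypothetical.

// Sketch (not in this commit): run the example over the legacy SSE endpoint
// by passing a pre-configured client into runExample.
import { MultiServerMCPClient } from "../src/index.js";
// Hypothetical module name for the example file above.
import { runExample } from "./calculator_langgraph_example.js";

const sseClient = new MultiServerMCPClient({
  calculator: {
    url: "http://localhost:3000/sse",
  },
});

runExample(sseClient).catch((error) => console.error("Setup error:", error));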
