diff --git a/src/app-bridge.test.ts b/src/app-bridge.test.ts index 969030fb..19951a38 100644 --- a/src/app-bridge.test.ts +++ b/src/app-bridge.test.ts @@ -215,6 +215,21 @@ describe("App <-> AppBridge integration", () => { expect(receivedCancellations[0]).toEqual({}); }); + it("tool notifications work with default no-op handlers", async () => { + // Don't set any custom handlers - use defaults + await app.connect(appTransport); + + // These should not throw (default handlers silently accept them) + // Just verify they complete without error + await bridge.sendToolInput({ arguments: {} }); + await bridge.sendToolInputPartial({ arguments: {} }); + await bridge.sendToolResult({ content: [{ type: "text", text: "ok" }] }); + await bridge.sendToolCancelled({}); + + // If we got here without throwing, the test passes + expect(true).toBe(true); + }); + it("setHostContext triggers app.onhostcontextchanged", async () => { const receivedContexts: unknown[] = []; app.onhostcontextchanged = (params) => { diff --git a/src/app.ts b/src/app.ts index 31885302..b5b519b9 100644 --- a/src/app.ts +++ b/src/app.ts @@ -16,7 +16,6 @@ import { PingRequestSchema, } from "@modelcontextprotocol/sdk/types.js"; import { AppNotification, AppRequest, AppResult } from "./types"; -import { PostMessageTransport } from "./message-transport"; import { LATEST_PROTOCOL_VERSION, McpUiAppCapabilities, @@ -47,8 +46,12 @@ import { McpUiRequestDisplayModeResultSchema, } from "./types"; import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js"; +import { PostMessageTransport } from "./message-transport"; +import { OpenAITransport, isOpenAIEnvironment } from "./openai/transport.js"; export { PostMessageTransport } from "./message-transport"; +export { OpenAITransport, isOpenAIEnvironment } from "./openai/transport"; +export * from "./openai/types"; export * from "./types"; export { applyHostStyleVariables, @@ -101,7 +104,7 @@ export const RESOURCE_MIME_TYPE = "text/html;profile=mcp-app"; * * 
@see ProtocolOptions from @modelcontextprotocol/sdk for inherited options */ -type AppOptions = ProtocolOptions & { +export type AppOptions = ProtocolOptions & { /** * Automatically report size changes to the host using ResizeObserver. * @@ -112,6 +115,19 @@ type AppOptions = ProtocolOptions & { * @default true */ autoResize?: boolean; + + /** + * Enable experimental OpenAI compatibility. + * + * When enabled (default), the App will auto-detect the environment: + * - If `window.openai` exists → use OpenAI Apps SDK + * - Otherwise → use MCP Apps protocol via PostMessageTransport + * + * Set to `false` to force MCP-only mode. + * + * @default true + */ + experimentalOAICompatibility?: boolean; }; type RequestHandlerExtra = Parameters< @@ -220,7 +236,10 @@ export class App extends Protocol { constructor( private _appInfo: Implementation, private _capabilities: McpUiAppCapabilities = {}, - private options: AppOptions = { autoResize: true }, + private options: AppOptions = { + autoResize: true, + experimentalOAICompatibility: true, + }, ) { super(options); @@ -229,9 +248,13 @@ export class App extends Protocol { return {}; }); - // Set up default handler to update _hostContext when notifications arrive. - // Users can override this by setting onhostcontextchanged. + // Set up default handlers for notifications. + // Users can override these by setting the corresponding on* properties. this.onhostcontextchanged = () => {}; + this.ontoolinput = () => {}; + this.ontoolinputpartial = () => {}; + this.ontoolresult = () => {}; + this.ontoolcancelled = () => {}; } /** @@ -989,47 +1012,73 @@ export class App extends Protocol { return () => resizeObserver.disconnect(); } + /** + * Create the default transport based on detected platform. + * @internal + */ + private createDefaultTransport(): Transport { + const experimentalOAI = this.options?.experimentalOAICompatibility ?? 
true; + if (experimentalOAI && isOpenAIEnvironment()) { + return new OpenAITransport(); + } + return new PostMessageTransport(window.parent); + } + /** * Establish connection with the host and perform initialization handshake. * * This method performs the following steps: - * 1. Connects the transport layer - * 2. Sends `ui/initialize` request with app info and capabilities - * 3. Receives host capabilities and context in response - * 4. Sends `ui/notifications/initialized` notification - * 5. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default) + * 1. Auto-detects platform if no transport is provided + * 2. Connects the transport layer + * 3. Sends `ui/initialize` request with app info and capabilities + * 4. Receives host capabilities and context in response + * 5. Sends `ui/notifications/initialized` notification + * 6. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default) + * 7. For OpenAI mode: delivers initial tool input/result from window.openai * * If initialization fails, the connection is automatically closed and an error * is thrown. * - * @param transport - Transport layer (typically PostMessageTransport) + * @param transport - Optional transport layer. 
If not provided, auto-detects + * based on the `experimentalOAICompatibility` option: + * - enabled (default) and `window.openai` exists → uses {@link OpenAITransport} + * - otherwise → uses {@link PostMessageTransport} + * @param options - Request options for the initialize request * * @throws {Error} If initialization fails or connection is lost * - * @example Connect with PostMessageTransport + * @example Auto-detect platform (recommended) * ```typescript * const app = new App( * { name: "MyApp", version: "1.0.0" }, * {} * ); * - * try { - * await app.connect(new PostMessageTransport(window.parent)); - * console.log("Connected successfully!"); - * } catch (error) { - * console.error("Failed to connect:", error); - * } + * // Auto-detects: OpenAI if window.openai exists, MCP otherwise + * await app.connect(); + * ``` + * + * @example Explicit MCP transport + * ```typescript + * await app.connect(new PostMessageTransport(window.parent)); + * ``` + * + * @example Explicit OpenAI transport + * ```typescript + * await app.connect(new OpenAITransport()); * ``` * * @see {@link McpUiInitializeRequest} for the initialization request structure * @see {@link McpUiInitializedNotification} for the initialized notification - * @see {@link PostMessageTransport} for the typical transport implementation + * @see {@link PostMessageTransport} for MCP-compatible hosts + * @see {@link OpenAITransport} for OpenAI/ChatGPT hosts */ override async connect( - transport: Transport = new PostMessageTransport(window.parent), + transport?: Transport, options?: RequestOptions, ): Promise { + transport ??= this.createDefaultTransport(); + await super.connect(transport); try { @@ -1061,6 +1110,11 @@ export class App extends Protocol { if (this.options?.autoResize) { this.setupSizeChangedNotifications(); } + + // For OpenAI mode: deliver initial state from window.openai + if (transport instanceof OpenAITransport) { + transport.deliverInitialState(); + } } catch (error) { // Disconnect if initialization
fails. void this.close(); diff --git a/src/openai/transport.test.ts b/src/openai/transport.test.ts new file mode 100644 index 00000000..962a0c6c --- /dev/null +++ b/src/openai/transport.test.ts @@ -0,0 +1,496 @@ +import { describe, test, expect, beforeEach, afterEach, mock } from "bun:test"; +import { OpenAITransport, isOpenAIEnvironment } from "./transport"; +import type { OpenAIGlobal, WindowWithOpenAI } from "./types"; + +describe("isOpenAIEnvironment", () => { + const originalWindow = globalThis.window; + + afterEach(() => { + // Restore original window + if (originalWindow === undefined) { + delete (globalThis as { window?: unknown }).window; + } else { + (globalThis as { window?: unknown }).window = originalWindow; + } + }); + + test("returns false when window is undefined", () => { + delete (globalThis as { window?: unknown }).window; + expect(isOpenAIEnvironment()).toBe(false); + }); + + test("returns false when window.openai is undefined", () => { + (globalThis as { window?: unknown }).window = {}; + expect(isOpenAIEnvironment()).toBe(false); + }); + + test("returns true when window.openai is an object", () => { + (globalThis as { window?: unknown }).window = { + openai: {}, + }; + expect(isOpenAIEnvironment()).toBe(true); + }); +}); + +describe("OpenAITransport", () => { + let mockOpenAI: OpenAIGlobal; + + beforeEach(() => { + mockOpenAI = { + theme: "dark", + locale: "en-US", + displayMode: "inline", + maxHeight: 600, + toolInput: { location: "Tokyo" }, + toolOutput: { temperature: 22 }, + callTool: mock(() => + Promise.resolve({ content: { result: "success" } }), + ) as unknown as OpenAIGlobal["callTool"], + sendFollowUpMessage: mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["sendFollowUpMessage"], + openExternal: mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["openExternal"], + notifyIntrinsicHeight: mock( + () => {}, + ) as unknown as OpenAIGlobal["notifyIntrinsicHeight"], + }; + + (globalThis as { window?: unknown 
}).window = { + openai: mockOpenAI, + }; + }); + + afterEach(() => { + delete (globalThis as { window?: unknown }).window; + }); + + test("throws when window.openai is not available", () => { + delete (globalThis as { window?: unknown }).window; + expect(() => new OpenAITransport()).toThrow( + "OpenAITransport requires window.openai", + ); + }); + + test("constructs successfully when window.openai is available", () => { + const transport = new OpenAITransport(); + expect(transport).toBeDefined(); + }); + + test("start() completes without error", async () => { + const transport = new OpenAITransport(); + await expect(transport.start()).resolves.toBeUndefined(); + }); + + test("close() calls onclose callback", async () => { + const transport = new OpenAITransport(); + const onclose = mock(() => {}); + transport.onclose = onclose; + + await transport.close(); + + expect(onclose).toHaveBeenCalled(); + }); + + describe("ui/initialize request", () => { + test("returns synthesized host info from window.openai", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + // Wait for microtask to complete + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 1, + result: { + hostInfo: { name: "ChatGPT", version: "1.0.0" }, + hostContext: { + theme: "dark", + locale: "en-US", + displayMode: "inline", + }, + }, + }); + }); + + test("dynamically reports capabilities based on available methods", async () => { + // Remove callTool to test dynamic detection + delete mockOpenAI.callTool; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await 
transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const result = (response as { result: { hostCapabilities: unknown } }) + .result.hostCapabilities as Record; + + // serverTools should NOT be present since callTool is missing + expect(result.serverTools).toBeUndefined(); + // openLinks should be present since openExternal exists + expect(result.openLinks).toBeDefined(); + // logging is always available + expect(result.logging).toBeDefined(); + }); + + test("includes availableDisplayModes when requestDisplayMode is available", async () => { + mockOpenAI.requestDisplayMode = mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["requestDisplayMode"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 1, + method: "ui/initialize", + params: { + protocolVersion: "2025-11-21", + appInfo: { name: "TestApp", version: "1.0.0" }, + appCapabilities: {}, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 1, + result: { + hostContext: { + availableDisplayModes: ["inline", "pip", "fullscreen"], + }, + }, + }); + }); + }); + + describe("tools/call request", () => { + test("delegates to window.openai.callTool()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 2, + method: "tools/call", + params: { + name: "get_weather", + arguments: { location: "Tokyo" }, + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.callTool).toHaveBeenCalledWith("get_weather", { + location: "Tokyo", 
+ }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 2, + result: expect.any(Object), + }); + }); + + test("returns error when callTool is not available", async () => { + delete mockOpenAI.callTool; + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 3, + method: "tools/call", + params: { name: "test_tool" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 3, + error: { + code: -32601, + message: expect.stringContaining("not supported"), + }, + }); + }); + }); + + describe("ui/message request", () => { + test("delegates to window.openai.sendFollowUpMessage()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 4, + method: "ui/message", + params: { + role: "user", + content: [{ type: "text", text: "Hello!" 
}], + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.sendFollowUpMessage).toHaveBeenCalledWith({ + prompt: "Hello!", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 4, + result: {}, + }); + }); + }); + + describe("ui/open-link request", () => { + test("delegates to window.openai.openExternal()", async () => { + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 5, + method: "ui/open-link", + params: { url: "https://example.com" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.openExternal).toHaveBeenCalledWith({ + href: "https://example.com", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 5, + result: {}, + }); + }); + }); + + describe("ui/request-display-mode request", () => { + test("delegates to window.openai.requestDisplayMode()", async () => { + mockOpenAI.requestDisplayMode = mock(() => + Promise.resolve(), + ) as unknown as OpenAIGlobal["requestDisplayMode"]; + + const transport = new OpenAITransport(); + let response: unknown; + transport.onmessage = (msg) => { + response = msg; + }; + + await transport.send({ + jsonrpc: "2.0", + id: 6, + method: "ui/request-display-mode", + params: { mode: "fullscreen" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(mockOpenAI.requestDisplayMode).toHaveBeenCalledWith({ + mode: "fullscreen", + }); + expect(response).toMatchObject({ + jsonrpc: "2.0", + id: 6, + result: { mode: "fullscreen" }, + }); + }); + }); + + describe("ui/notifications/size-changed notification", () => { + test("delegates to window.openai.notifyIntrinsicHeight()", async () => { + const transport = new OpenAITransport(); + + await transport.send({ + jsonrpc: "2.0", + method: "ui/notifications/size-changed", + params: { width: 400, height: 300 }, + }); + + 
expect(mockOpenAI.notifyIntrinsicHeight).toHaveBeenCalledWith(300); + }); + }); + + describe("deliverInitialState", () => { + test("delivers tool input notification", async () => { + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolInputNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-input", + ); + expect(toolInputNotification).toMatchObject({ + jsonrpc: "2.0", + method: "ui/notifications/tool-input", + params: { arguments: { location: "Tokyo" } }, + }); + }); + + test("delivers tool result notification", async () => { + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + expect(toolResultNotification).toBeDefined(); + }); + + test("includes _meta from toolResponseMetadata in tool result", async () => { + mockOpenAI.toolResponseMetadata = { widgetId: "abc123", version: 2 }; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + expect(toolResultNotification).toMatchObject({ + jsonrpc: "2.0", + method: "ui/notifications/tool-result", + params: { + _meta: { widgetId: "abc123", version: 2 }, + }, + }); + }); + + test("converts null _meta to undefined in tool 
result", async () => { + // Simulate null being set (e.g., from JSON parsing where null is valid) + ( + mockOpenAI as unknown as { toolResponseMetadata: null } + ).toolResponseMetadata = null; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ) as { params?: { _meta?: unknown } } | undefined; + expect(toolResultNotification).toBeDefined(); + // _meta should be undefined, not null (SDK rejects null) + expect(toolResultNotification?.params?._meta).toBeUndefined(); + }); + + test("does not deliver tool-result when toolOutput is null", async () => { + // Simulate null being set (e.g., from JSON parsing) + (mockOpenAI as unknown as { toolOutput: null }).toolOutput = null; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const toolResultNotification = messages.find( + (m: unknown) => + (m as { method?: string }).method === "ui/notifications/tool-result", + ); + // Should NOT deliver tool-result when toolOutput is null + expect(toolResultNotification).toBeUndefined(); + }); + + test("does not deliver notifications when data is missing", async () => { + delete mockOpenAI.toolInput; + delete mockOpenAI.toolOutput; + + const transport = new OpenAITransport(); + const messages: unknown[] = []; + transport.onmessage = (msg) => { + messages.push(msg); + }; + + transport.deliverInitialState(); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + expect(messages).toHaveLength(0); + }); + }); +}); diff --git a/src/openai/transport.ts 
b/src/openai/transport.ts new file mode 100644 index 00000000..bc87b7fe --- /dev/null +++ b/src/openai/transport.ts @@ -0,0 +1,609 @@ +/** + * Transport adapter for OpenAI Apps SDK (window.openai) compatibility. + * + * This transport allows MCP Apps to run in OpenAI's ChatGPT environment by + * translating between the MCP Apps protocol and the OpenAI Apps SDK APIs. + * + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ + +import { + JSONRPCMessage, + JSONRPCRequest, + JSONRPCNotification, + RequestId, +} from "@modelcontextprotocol/sdk/types.js"; +import { + Transport, + TransportSendOptions, +} from "@modelcontextprotocol/sdk/shared/transport.js"; +import { OpenAIGlobal, getOpenAIGlobal, isOpenAIEnvironment } from "./types.js"; +import { LATEST_PROTOCOL_VERSION, McpUiHostContext } from "../spec.types.js"; + +/** + * JSON-RPC success response message. + * @internal + */ +interface JSONRPCSuccessResponse { + jsonrpc: "2.0"; + id: RequestId; + result: Record; +} + +/** + * JSON-RPC error response message. + * @internal + */ +interface JSONRPCErrorResponse { + jsonrpc: "2.0"; + id: RequestId; + error: { code: number; message: string; data?: unknown }; +} + +/** + * Check if a message is a JSON-RPC request (has method and id). + */ +function isRequest(message: JSONRPCMessage): message is JSONRPCRequest { + return "method" in message && "id" in message; +} + +/** + * Check if a message is a JSON-RPC notification (has method but no id). + */ +function isNotification( + message: JSONRPCMessage, +): message is JSONRPCNotification { + return "method" in message && !("id" in message); +} + +/** + * Transport implementation that bridges MCP Apps protocol to OpenAI Apps SDK. 
+ * + * This transport enables MCP Apps to run seamlessly in ChatGPT by: + * - Synthesizing initialization responses from window.openai properties + * - Mapping tool calls to window.openai.callTool() + * - Mapping messages to window.openai.sendFollowUpMessage() + * - Mapping link opens to window.openai.openExternal() + * - Reporting size changes via window.openai.notifyIntrinsicHeight() + * + * ## Usage + * + * Typically you don't create this transport directly. The App will create + * it automatically when `experimentalOAICompatibility` is enabled (default) + * and `window.openai` is detected. + * + * ```typescript + * import { App } from '@modelcontextprotocol/ext-apps'; + * + * const app = new App({ name: "MyApp", version: "1.0.0" }, {}); + * await app.connect(); // Auto-detects OpenAI environment + * ``` + * + * ## Manual Usage + * + * For advanced use cases, you can create the transport directly: + * + * ```typescript + * import { App, OpenAITransport } from '@modelcontextprotocol/ext-apps'; + * + * const app = new App({ name: "MyApp", version: "1.0.0" }, {}); + * await app.connect(new OpenAITransport()); + * ``` + * + * @see {@link App.connect} for automatic transport selection + * @see {@link PostMessageTransport} for MCP-compatible hosts + */ +export class OpenAITransport implements Transport { + private openai: OpenAIGlobal; + private _closed = false; + + /** + * Create a new OpenAITransport. + * + * @throws {Error} If window.openai is not available + * + * @example + * ```typescript + * if (isOpenAIEnvironment()) { + * const transport = new OpenAITransport(); + * await app.connect(transport); + * } + * ``` + */ + constructor() { + const openai = getOpenAIGlobal(); + if (!openai) { + throw new Error( + "OpenAITransport requires window.openai to be available. " + + "This transport should only be used in OpenAI/ChatGPT environments.", + ); + } + this.openai = openai; + } + + /** + * Begin listening for messages. 
+ * + * In OpenAI mode, there's no event-based message flow to start. + * The data is pre-populated in window.openai properties. + */ + async start(): Promise { + // Nothing to do - window.openai is already available and populated + } + + /** + * Send a JSON-RPC message. + * + * Requests are handled by mapping to window.openai methods. + * Notifications are handled for size changes; others are no-ops. + * + * @param message - JSON-RPC message to send + * @param _options - Send options (unused) + */ + async send( + message: JSONRPCMessage, + _options?: TransportSendOptions, + ): Promise { + if (this._closed) { + throw new Error("Transport is closed"); + } + + if (isRequest(message)) { + // Handle requests - map to window.openai methods and synthesize responses + const response = await this.handleRequest(message); + // Deliver response asynchronously to maintain message ordering + queueMicrotask(() => this.onmessage?.(response)); + } else if (isNotification(message)) { + // Handle notifications + this.handleNotification(message); + } + // Responses are ignored - we don't receive requests from OpenAI + } + + /** + * Handle an outgoing JSON-RPC request by mapping to window.openai. 
+ */ + private async handleRequest( + request: JSONRPCRequest, + ): Promise { + const { method, id, params } = request; + + try { + switch (method) { + case "ui/initialize": + return this.handleInitialize(id); + + case "tools/call": + return await this.handleToolCall( + id, + params as { name: string; arguments?: Record }, + ); + + case "ui/message": + return await this.handleMessage( + id, + params as { role: string; content: unknown[] }, + ); + + case "ui/open-link": + return await this.handleOpenLink(id, params as { url: string }); + + case "ui/request-display-mode": + return await this.handleRequestDisplayMode( + id, + params as { mode: string }, + ); + + case "ping": + return this.createSuccessResponse(id, {}); + + default: + return this.createErrorResponse( + id, + -32601, + `Method not supported in OpenAI mode: ${method}`, + ); + } + } catch (error) { + return this.createErrorResponse( + id, + -32603, + error instanceof Error ? error.message : String(error), + ); + } + } + + /** + * Handle ui/initialize request by synthesizing response from window.openai. 
+ */ + private handleInitialize(id: RequestId): JSONRPCSuccessResponse { + // Safely extract userAgent - could be string or object + let userAgent: string | undefined; + if (typeof this.openai.userAgent === "string") { + userAgent = this.openai.userAgent; + } else if ( + this.openai.userAgent && + typeof this.openai.userAgent === "object" + ) { + userAgent = JSON.stringify(this.openai.userAgent); + } + + // Safely extract safeAreaInsets - only include if all values are present + let safeAreaInsets: McpUiHostContext["safeAreaInsets"]; + const sa = this.openai.safeArea; + if ( + sa && + typeof sa.top === "number" && + typeof sa.right === "number" && + typeof sa.bottom === "number" && + typeof sa.left === "number" + ) { + safeAreaInsets = sa; + } + + const hostContext: McpUiHostContext = { + theme: this.openai.theme, + locale: this.openai.locale, + displayMode: this.openai.displayMode, + // If requestDisplayMode is available, ChatGPT supports all three modes + availableDisplayModes: this.openai.requestDisplayMode + ? ["inline", "pip", "fullscreen"] + : undefined, + viewport: this.openai.maxHeight + ? { width: 0, height: 0, maxHeight: this.openai.maxHeight } + : undefined, + safeAreaInsets, + userAgent, + }; + + // Dynamically determine capabilities based on what window.openai supports + const hostCapabilities: Record = { + // Logging is always available (we map to console.log) + logging: {}, + }; + + // Only advertise serverTools if callTool is available + if (this.openai.callTool) { + hostCapabilities.serverTools = {}; + } + + // Only advertise openLinks if openExternal is available + if (this.openai.openExternal) { + hostCapabilities.openLinks = {}; + } + + return this.createSuccessResponse(id, { + protocolVersion: LATEST_PROTOCOL_VERSION, + hostInfo: { + name: "ChatGPT", + version: "1.0.0", + }, + hostCapabilities, + hostContext, + }); + } + + /** + * Handle tools/call request by delegating to window.openai.callTool(). 
+ */ + private async handleToolCall( + id: RequestId, + params: { name: string; arguments?: Record }, + ): Promise { + if (!this.openai.callTool) { + return this.createErrorResponse( + id, + -32601, + "Tool calls are not supported in this OpenAI environment", + ); + } + + const result = await this.openai.callTool(params.name, params.arguments); + + // Handle different response formats from OpenAI + // Could be { content: [...] }, { structuredContent: ... }, or the raw data + let content: { type: string; text: string }[]; + if (Array.isArray(result.content)) { + // Normalize content items - keep only type/text, stripping annotations/_meta + content = result.content.map((item: unknown) => { + if ( + typeof item === "object" && + item !== null && + "type" in item && + "text" in item + ) { + const typedItem = item as { + type: string; + text: string; + annotations?: unknown; + _meta?: unknown; + }; + return { type: typedItem.type, text: typedItem.text }; + } + return { type: "text", text: JSON.stringify(item) }; + }); + } else if (result.structuredContent !== undefined) { + content = [ + { type: "text", text: JSON.stringify(result.structuredContent) }, + ]; + } else if (result.content !== undefined) { + content = [{ type: "text", text: JSON.stringify(result.content) }]; + } else { + // The result itself might be the structured content + content = [{ type: "text", text: JSON.stringify(result) }]; + } + + return this.createSuccessResponse(id, { + content, + isError: result.isError, + }); + } + + /** + * Handle ui/message request by delegating to window.openai.sendFollowUpMessage().
+ */ + private async handleMessage( + id: RequestId, + params: { role: string; content: unknown[] }, + ): Promise { + if (!this.openai.sendFollowUpMessage) { + return this.createErrorResponse( + id, + -32601, + "Sending messages is not supported in this OpenAI environment", + ); + } + + // Extract text content from the message + const textContent = params.content + .filter( + (c): c is { type: "text"; text: string } => + typeof c === "object" && + c !== null && + (c as { type?: string }).type === "text", + ) + .map((c) => c.text) + .join("\n"); + + await this.openai.sendFollowUpMessage({ prompt: textContent }); + + return this.createSuccessResponse(id, {}); + } + + /** + * Handle ui/open-link request by delegating to window.openai.openExternal(). + */ + private async handleOpenLink( + id: RequestId, + params: { url: string }, + ): Promise { + if (!this.openai.openExternal) { + return this.createErrorResponse( + id, + -32601, + "Opening external links is not supported in this OpenAI environment", + ); + } + + await this.openai.openExternal({ href: params.url }); + + return this.createSuccessResponse(id, {}); + } + + /** + * Handle ui/request-display-mode by delegating to window.openai.requestDisplayMode(). + */ + private async handleRequestDisplayMode( + id: RequestId, + params: { mode: string }, + ): Promise { + if (!this.openai.requestDisplayMode) { + return this.createErrorResponse( + id, + -32601, + "Display mode changes are not supported in this OpenAI environment", + ); + } + + const mode = params.mode as "inline" | "pip" | "fullscreen"; + await this.openai.requestDisplayMode({ mode }); + + return this.createSuccessResponse(id, { mode }); + } + + /** + * Handle an outgoing notification. 
+ */ + private handleNotification(notification: JSONRPCNotification): void { + const { method, params } = notification; + + switch (method) { + case "ui/notifications/size-changed": + this.handleSizeChanged(params as { width?: number; height?: number }); + break; + + case "ui/notifications/initialized": + // No-op - OpenAI doesn't need this notification + break; + + case "notifications/message": + // Log messages - could be sent to console in OpenAI mode + console.log("[MCP App Log]", params); + break; + + default: + // Ignore unknown notifications + break; + } + } + + /** + * Handle size changed notification by calling window.openai.notifyIntrinsicHeight(). + */ + private handleSizeChanged(params: { width?: number; height?: number }): void { + if (this.openai.notifyIntrinsicHeight && params.height !== undefined) { + this.openai.notifyIntrinsicHeight(params.height); + } + } + + /** + * Create a success JSON-RPC response. + */ + private createSuccessResponse( + id: RequestId, + result: Record, + ): JSONRPCSuccessResponse { + return { + jsonrpc: "2.0", + id, + result, + }; + } + + /** + * Create an error JSON-RPC response. + */ + private createErrorResponse( + id: RequestId, + code: number, + message: string, + ): JSONRPCErrorResponse { + return { + jsonrpc: "2.0", + id, + error: { code, message }, + }; + } + + /** + * Deliver initial tool input and result notifications. + * + * Called by App after connection to deliver pre-populated data from + * window.openai as notifications that the app's handlers expect. 
+ * + * @internal + */ + deliverInitialState(): void { + // Deliver tool input if available + if (this.openai.toolInput !== undefined) { + queueMicrotask(() => { + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/tool-input", + params: { arguments: this.openai.toolInput }, + } as JSONRPCNotification); + }); + } + + // Deliver tool output if available (check for both null and undefined) + if (this.openai.toolOutput != null) { + queueMicrotask(() => { + // Normalize toolOutput to MCP CallToolResult format + let content: Array<{ + type: string; + text?: string; + [key: string]: unknown; + }>; + let structuredContent: Record | undefined; + const output = this.openai.toolOutput; + + // Check if output is already a CallToolResult-like object with content/structuredContent + if ( + typeof output === "object" && + output !== null && + ("content" in output || "structuredContent" in output) + ) { + const result = output as { + content?: unknown; + structuredContent?: Record; + }; + // Prefer structuredContent if available + if (result.structuredContent !== undefined) { + structuredContent = result.structuredContent; + // Generate content from structuredContent if not provided + content = Array.isArray(result.content) + ? 
result.content + : [{ type: "text", text: JSON.stringify(result.structuredContent) }]; + } else if (Array.isArray(result.content)) { + content = result.content; + } else { + content = [{ type: "text", text: JSON.stringify(output) }]; + } + } else if (Array.isArray(output)) { + // Already an array of content blocks + content = output; + } else if ( + typeof output === "object" && + output !== null && + "type" in output && + typeof (output as { type: unknown }).type === "string" + ) { + // Single content block object like {type: "text", text: "..."} + content = [output as { type: string; text?: string }]; + } else if ( + typeof output === "object" && + output !== null && + "text" in output && + typeof (output as { text: unknown }).text === "string" + ) { + // Object with just text field - treat as text content + content = [{ type: "text", text: (output as { text: string }).text }]; + } else if (typeof output === "object" && output !== null) { + // Plain object - use as structuredContent and generate text content + structuredContent = output as Record; + content = [{ type: "text", text: JSON.stringify(output) }]; + } else { + // Unknown shape - stringify it + content = [{ type: "text", text: JSON.stringify(output) }]; + } + + this.onmessage?.({ + jsonrpc: "2.0", + method: "ui/notifications/tool-result", + params: { + content, + structuredContent, + // Include _meta from toolResponseMetadata if available (use undefined not null) + _meta: this.openai.toolResponseMetadata ?? undefined, + }, + } as JSONRPCNotification); + }); + } + } + + /** + * Close the transport. + */ + async close(): Promise { + this._closed = true; + this.onclose?.(); + } + + /** + * Called when the transport is closed. + */ + onclose?: () => void; + + /** + * Called when an error occurs. + */ + onerror?: (error: Error) => void; + + /** + * Called when a message is received. + */ + onmessage?: (message: JSONRPCMessage) => void; + + /** + * Session identifier (unused in OpenAI mode). 
+ */ + sessionId?: string; + + /** + * Callback to set the negotiated protocol version. + */ + setProtocolVersion?: (version: string) => void; +} + +// Re-export utility functions +export { isOpenAIEnvironment, getOpenAIGlobal }; diff --git a/src/openai/types.ts b/src/openai/types.ts new file mode 100644 index 00000000..435823f9 --- /dev/null +++ b/src/openai/types.ts @@ -0,0 +1,244 @@ +/** + * Type definitions for the OpenAI Apps SDK's window.openai object. + * + * These types describe the API surface that ChatGPT injects into widget iframes. + * When running in OpenAI mode, the {@link OpenAITransport} uses these APIs to + * communicate with the ChatGPT host. + * + * @see https://developers.openai.com/apps-sdk/build/chatgpt-ui/ + */ + +/** + * Display mode for the widget in ChatGPT. + */ +export type OpenAIDisplayMode = "inline" | "pip" | "fullscreen"; + +/** + * Theme setting from the ChatGPT host. + */ +export type OpenAITheme = "light" | "dark"; + +/** + * Safe area insets for the widget viewport. + */ +export interface OpenAISafeArea { + top: number; + right: number; + bottom: number; + left: number; +} + +/** + * Result of a tool call via window.openai.callTool(). + * + * Note: The exact return type isn't fully documented by OpenAI. + * Based on observed behavior, it returns structured content. + */ +export interface OpenAIToolCallResult { + /** Structured content from the tool (may be any shape) */ + structuredContent?: unknown; + /** Legacy content field (for compatibility) */ + content?: unknown; + /** Whether the tool call resulted in an error */ + isError?: boolean; +} + +/** + * The window.openai object injected by ChatGPT into widget iframes. + * + * This interface describes the API surface available to widgets running + * in the ChatGPT environment. 
+ */ +export interface OpenAIGlobal { + // ───────────────────────────────────────────────────────────────────────── + // State & Data Properties + // ───────────────────────────────────────────────────────────────────────── + + /** + * Tool arguments passed when invoking the tool. + * Pre-populated when the widget loads. + */ + toolInput?: Record; + + /** + * Structured content returned by the MCP server. + * Pre-populated when the widget loads (if tool has completed). + */ + toolOutput?: unknown; + + /** + * The `_meta` payload from tool response (widget-only, hidden from model). + */ + toolResponseMetadata?: Record; + + /** + * Persisted UI state snapshot between renders. + * Set via setWidgetState(), rehydrated on subsequent renders. + */ + widgetState?: unknown; + + /** + * Current theme setting. + */ + theme?: OpenAITheme; + + /** + * Current display mode of the widget. + */ + displayMode?: OpenAIDisplayMode; + + /** + * Maximum height available for the widget. + */ + maxHeight?: number; + + /** + * Safe area insets for the widget. + */ + safeArea?: OpenAISafeArea; + + /** + * Current view mode. + */ + view?: string; + + /** + * User agent string from the host. + */ + userAgent?: string; + + /** + * Locale setting (BCP 47 language tag). + */ + locale?: string; + + // ───────────────────────────────────────────────────────────────────────── + // State Management Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Persist UI state synchronously after interactions. + * State is scoped to this widget instance and rehydrated on re-renders. + * + * @param state - State object to persist + */ + setWidgetState?(state: unknown): void; + + // ───────────────────────────────────────────────────────────────────────── + // Tool & Chat Integration Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Invoke another MCP tool from the widget. 
+ * + * @param name - Name of the tool to call + * @param args - Arguments to pass to the tool + * @returns Promise resolving to the tool result + */ + callTool?( + name: string, + args?: Record, + ): Promise; + + /** + * Inject a user message into the conversation. + * + * @param options - Message options + * @param options.prompt - The message text to send + */ + sendFollowUpMessage?(options: { prompt: string }): Promise; + + // ───────────────────────────────────────────────────────────────────────── + // File Operations + // ───────────────────────────────────────────────────────────────────────── + + /** + * Upload a user-selected file. + * + * @param file - File to upload + * @returns Promise resolving to the file ID + */ + uploadFile?(file: File): Promise<{ fileId: string }>; + + /** + * Retrieve a temporary download URL for a file. + * + * @param options - File options + * @param options.fileId - ID of the file to download + * @returns Promise resolving to the download URL + */ + getFileDownloadUrl?(options: { fileId: string }): Promise<{ url: string }>; + + // ───────────────────────────────────────────────────────────────────────── + // Layout & Display Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Request a display mode change (inline, pip, fullscreen). + * + * @param options - Display mode options + * @param options.mode - Requested display mode + */ + requestDisplayMode?(options: { mode: OpenAIDisplayMode }): Promise; + + /** + * Spawn a ChatGPT-owned modal. + */ + requestModal?(options: unknown): Promise; + + /** + * Report dynamic widget height to the host. + * + * @param height - Height in pixels + */ + notifyIntrinsicHeight?(height: number): void; + + /** + * Close the widget from the UI. 
+ */ + requestClose?(): void; + + // ───────────────────────────────────────────────────────────────────────── + // Navigation Methods + // ───────────────────────────────────────────────────────────────────────── + + /** + * Open a vetted external link in a new tab. + * + * @param options - Link options + * @param options.href - URL to open + */ + openExternal?(options: { href: string }): Promise; +} + +/** + * Window type augmentation for OpenAI environment. + */ +export interface WindowWithOpenAI { + openai: OpenAIGlobal; +} + +/** + * Detect if the current environment has window.openai available. + * + * @returns true if running in OpenAI/ChatGPT environment + */ +export function isOpenAIEnvironment(): boolean { + return ( + typeof window !== "undefined" && + typeof (window as unknown as WindowWithOpenAI).openai === "object" && + (window as unknown as WindowWithOpenAI).openai !== null + ); +} + +/** + * Get the window.openai object if available. + * + * @returns The OpenAI global object, or undefined if not in OpenAI environment + */ +export function getOpenAIGlobal(): OpenAIGlobal | undefined { + if (isOpenAIEnvironment()) { + return (window as unknown as WindowWithOpenAI).openai; + } + return undefined; +} diff --git a/src/react/useApp.tsx b/src/react/useApp.tsx index ccfce3eb..111f8591 100644 --- a/src/react/useApp.tsx +++ b/src/react/useApp.tsx @@ -1,16 +1,12 @@ import { useEffect, useState } from "react"; import { Implementation } from "@modelcontextprotocol/sdk/types.js"; import { Client } from "@modelcontextprotocol/sdk/client"; -import { App, McpUiAppCapabilities, PostMessageTransport } from "../app"; +import { App, McpUiAppCapabilities } from "../app"; export * from "../app"; /** * Options for configuring the useApp hook. * - * Note: This interface does NOT expose App options like `autoResize`. - * The hook creates the App with default options (autoResize: true). If you need - * custom App options, create the App manually instead of using this hook. 
- * * @see {@link useApp} for the hook that uses these options * @see {@link useAutoResize} for manual auto-resize control with custom App options */ @@ -19,6 +15,18 @@ export interface UseAppOptions { appInfo: Implementation; /** Features and capabilities this app provides */ capabilities: McpUiAppCapabilities; + /** + * Enable experimental OpenAI compatibility. + * + * When enabled (default), the App will auto-detect the environment: + * - If `window.openai` exists → use OpenAI Apps SDK + * - Otherwise → use MCP Apps protocol via PostMessageTransport + * + * Set to `false` to force MCP-only mode. + * + * @default true + */ + experimentalOAICompatibility?: boolean; /** * Called after App is created but before connection. * @@ -60,14 +68,18 @@ export interface AppState { * React hook to create and connect an MCP App. * * This hook manages the complete lifecycle of an {@link App}: creation, connection, - * and cleanup. It automatically creates a {@link PostMessageTransport} to window.parent - * and handles initialization. + * and cleanup. It automatically detects the platform (MCP or OpenAI) and uses the + * appropriate transport. + * + * **Cross-Platform Support**: The hook supports both MCP-compatible hosts and + * OpenAI's ChatGPT environment. By default, it auto-detects the platform. + * Set `experimentalOAICompatibility: false` to force MCP-only mode. * * **Important**: The hook intentionally does NOT re-run when options change * to avoid reconnection loops. Options are only used during the initial mount. * * **Note**: This is part of the optional React integration. The core SDK - * (App, PostMessageTransport) is framework-agnostic and can be + * (App, PostMessageTransport, OpenAITransport) is framework-agnostic and can be * used with any UI framework or vanilla JavaScript. 
* * @param options - Configuration for the app @@ -75,22 +87,18 @@ export interface AppState { * initialization, the `error` field will contain the error (typically connection * timeouts, initialization handshake failures, or transport errors). * - * @example Basic usage + * @example Basic usage (auto-detects platform) * ```typescript - * import { useApp, McpUiToolInputNotificationSchema } from '@modelcontextprotocol/ext-apps/react'; + * import { useApp } from '@modelcontextprotocol/ext-apps/react'; * * function MyApp() { * const { app, isConnected, error } = useApp({ * appInfo: { name: "MyApp", version: "1.0.0" }, * capabilities: {}, * onAppCreated: (app) => { - * // Register handlers before connection - * app.setNotificationHandler( - * McpUiToolInputNotificationSchema, - * (notification) => { - * console.log("Tool input:", notification.params.arguments); - * } - * ); + * app.ontoolinput = (params) => { + * console.log("Tool input:", params.arguments); + * }; * }, * }); * @@ -100,12 +108,22 @@ export interface AppState { * } * ``` * + * @example Force MCP-only mode + * ```typescript + * const { app } = useApp({ + * appInfo: { name: "MyApp", version: "1.0.0" }, + * capabilities: {}, + * experimentalOAICompatibility: false, // Disable OpenAI auto-detection + * }); + * ``` + * * @see {@link App.connect} for the underlying connection method * @see {@link useAutoResize} for manual auto-resize control when using custom App options */ export function useApp({ appInfo, capabilities, + experimentalOAICompatibility = true, onAppCreated, }: UseAppOptions): AppState { const [app, setApp] = useState(null); @@ -117,13 +135,14 @@ export function useApp({ async function connect() { try { - const transport = new PostMessageTransport(window.parent); - const app = new App(appInfo, capabilities); + const app = new App(appInfo, capabilities, { + experimentalOAICompatibility, + }); // Register handlers BEFORE connecting onAppCreated?.(app); - await app.connect(transport); + await 
app.connect(); if (mounted) { setApp(app); diff --git a/src/server/index.test.ts b/src/server/index.test.ts index d5e0a80a..e4425583 100644 --- a/src/server/index.test.ts +++ b/src/server/index.test.ts @@ -4,6 +4,8 @@ import { registerAppResource, RESOURCE_URI_META_KEY, RESOURCE_MIME_TYPE, + OPENAI_RESOURCE_SUFFIX, + OPENAI_MIME_TYPE, } from "./index"; import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; @@ -53,6 +55,34 @@ describe("registerAppTool", () => { expect(capturedHandler).toBe(handler); }); + it("should add openai/outputTemplate metadata for cross-platform compatibility", () => { + let capturedConfig: Record | undefined; + + const mockServer = { + registerTool: mock( + (_name: string, config: Record, _handler: unknown) => { + capturedConfig = config; + }, + ), + }; + + registerAppTool( + mockServer as unknown as Pick, + "my-tool", + { + _meta: { + [RESOURCE_URI_META_KEY]: "ui://test/widget.html", + }, + }, + async () => ({ content: [{ type: "text" as const, text: "ok" }] }), + ); + + const meta = capturedConfig?._meta as Record; + expect(meta["openai/outputTemplate"]).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + }); + describe("backward compatibility", () => { it("should set legacy key when _meta.ui.resourceUri is provided", () => { let capturedConfig: Record | undefined; @@ -196,18 +226,18 @@ describe("registerAppTool", () => { }); describe("registerAppResource", () => { - it("should register a resource with default MIME type", () => { - let capturedName: string | undefined; - let capturedUri: string | undefined; - let capturedConfig: Record | undefined; + it("should register both MCP and OpenAI resources", () => { + const registrations: Array<{ + name: string; + uri: string; + config: Record; + }> = []; const mockServer = { registerTool: mock(() => {}), registerResource: mock( (name: string, uri: string, config: Record) => { - capturedName = name; - capturedUri = uri; - capturedConfig = config; + 
registrations.push({ name, uri, config }); }, ), }; @@ -233,21 +263,32 @@ describe("registerAppResource", () => { callback, ); - expect(mockServer.registerResource).toHaveBeenCalledTimes(1); - expect(capturedName).toBe("My Resource"); - expect(capturedUri).toBe("ui://test/widget.html"); - expect(capturedConfig?.mimeType).toBe(RESOURCE_MIME_TYPE); - expect(capturedConfig?.description).toBe("A test resource"); + // Should register TWO resources (MCP + OpenAI) + expect(mockServer.registerResource).toHaveBeenCalledTimes(2); + + // First: MCP resource + expect(registrations[0].name).toBe("My Resource"); + expect(registrations[0].uri).toBe("ui://test/widget.html"); + expect(registrations[0].config.mimeType).toBe(RESOURCE_MIME_TYPE); + expect(registrations[0].config.description).toBe("A test resource"); + + // Second: OpenAI resource + expect(registrations[1].name).toBe("My Resource (OpenAI)"); + expect(registrations[1].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + expect(registrations[1].config.mimeType).toBe(OPENAI_MIME_TYPE); + expect(registrations[1].config.description).toBe("A test resource"); }); - it("should allow custom MIME type to override default", () => { - let capturedConfig: Record | undefined; + it("should allow custom MIME type to override default for MCP resource", () => { + const registrations: Array<{ config: Record }> = []; const mockServer = { registerTool: mock(() => {}), registerResource: mock( (_name: string, _uri: string, config: Record) => { - capturedConfig = config; + registrations.push({ config }); }, ), }; @@ -271,12 +312,16 @@ describe("registerAppResource", () => { }), ); - // Custom mimeType should override the default - expect(capturedConfig?.mimeType).toBe("text/html"); + // MCP resource should use custom mimeType + expect(registrations[0].config.mimeType).toBe("text/html"); + // OpenAI resource should always use skybridge MIME type + expect(registrations[1].config.mimeType).toBe(OPENAI_MIME_TYPE); }); - 
it("should call the callback when handler is invoked", async () => { - let capturedHandler: (() => Promise) | undefined; + it("should transform OpenAI resource callback to use skybridge MIME type", async () => { + let mcpHandler: (() => Promise) | undefined; + let openaiHandler: (() => Promise) | undefined; + let callCount = 0; const mockServer = { registerTool: mock(() => {}), @@ -287,12 +332,17 @@ describe("registerAppResource", () => { _config: unknown, handler: () => Promise, ) => { - capturedHandler = handler; + if (callCount === 0) { + mcpHandler = handler; + } else { + openaiHandler = handler; + } + callCount++; }, ), }; - const expectedResult = { + const callback = mock(async () => ({ contents: [ { uri: "ui://test/widget.html", @@ -300,8 +350,7 @@ describe("registerAppResource", () => { text: "content", }, ], - }; - const callback = mock(async () => expectedResult); + })); registerAppResource( mockServer as unknown as Pick, @@ -311,10 +360,70 @@ describe("registerAppResource", () => { callback, ); - expect(capturedHandler).toBeDefined(); - const result = await capturedHandler!(); + // MCP handler should return original content + const mcpResult = (await mcpHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(mcpResult.contents[0].mimeType).toBe(RESOURCE_MIME_TYPE); + + // OpenAI handler should return with skybridge MIME type + const openaiResult = (await openaiHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(openaiResult.contents[0].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + expect(openaiResult.contents[0].mimeType).toBe(OPENAI_MIME_TYPE); + }); + + it("should preserve custom MIME types in OpenAI resource callback", async () => { + let openaiHandler: (() => Promise) | undefined; + let callCount = 0; + + const mockServer = { + registerTool: mock(() => {}), + registerResource: mock( + ( + _name: string, + _uri: string, + _config: unknown, + handler: () => Promise, + 
) => { + if (callCount === 1) { + openaiHandler = handler; + } + callCount++; + }, + ), + }; + + // Callback returns custom MIME type (not the default MCP App type) + const callback = mock(async () => ({ + contents: [ + { + uri: "ui://test/widget.html", + mimeType: "application/json", + text: "{}", + }, + ], + })); - expect(callback).toHaveBeenCalledTimes(1); - expect(result).toEqual(expectedResult); + registerAppResource( + mockServer as unknown as Pick, + "My Resource", + "ui://test/widget.html", + { _meta: { ui: {} } }, + callback, + ); + + // OpenAI handler should preserve the custom MIME type + const openaiResult = (await openaiHandler!()) as { + contents: Array<{ uri: string; mimeType: string }>; + }; + expect(openaiResult.contents[0].uri).toBe( + "ui://test/widget.html" + OPENAI_RESOURCE_SUFFIX, + ); + // Custom MIME type should be preserved, not converted to skybridge + expect(openaiResult.contents[0].mimeType).toBe("application/json"); }); }); diff --git a/src/server/index.ts b/src/server/index.ts index 2191a72f..9607a3e5 100644 --- a/src/server/index.ts +++ b/src/server/index.ts @@ -1,6 +1,16 @@ /** * Server Helpers for MCP Apps. * + * These utilities register tools and resources that work with both + * MCP-compatible hosts and OpenAI's ChatGPT Apps SDK. + * + * ## Cross-Platform Support + * + * | Feature | MCP Apps | OpenAI Apps SDK | + * |---------|----------|-----------------| + * | Tool metadata | `_meta.ui.resourceUri` | `_meta["openai/outputTemplate"]` | + * | Resource MIME | `text/html;profile=mcp-app` | `text/html+skybridge` | + * * @module server-helpers */ @@ -23,6 +33,17 @@ import type { ZodRawShape } from "zod"; export { RESOURCE_URI_META_KEY, RESOURCE_MIME_TYPE }; export type { ResourceMetadata, ToolCallback, ReadResourceCallback }; +/** + * OpenAI skybridge URI suffix. + * Appended to resource URIs for OpenAI-specific resource registration. + */ +export const OPENAI_RESOURCE_SUFFIX = "+skybridge"; + +/** + * OpenAI skybridge MIME type. 
+ */ +export const OPENAI_MIME_TYPE = "text/html+skybridge"; + /** * Tool configuration (same as McpServer.registerTool). */ @@ -47,7 +68,7 @@ export interface McpUiAppToolConfig extends ToolConfig { | { /** * URI of the UI resource to display for this tool. - * This is converted to `_meta["ui/resourceUri"]`. + * This is converted to `_meta.ui.resourceUri`. * * @example "ui://weather/widget.html" * @@ -118,15 +139,31 @@ export function registerAppTool( normalizedMeta = { ...meta, ui: { ...uiMeta, resourceUri: legacyUri } }; } + // Get the resource URI after normalization + const resourceUri = (normalizedMeta.ui as McpUiToolMeta | undefined) + ?.resourceUri; + + // Add OpenAI outputTemplate metadata for cross-platform compatibility + if (resourceUri) { + normalizedMeta = { + ...normalizedMeta, + "openai/outputTemplate": resourceUri + OPENAI_RESOURCE_SUFFIX, + }; + } + server.registerTool(name, { ...config, _meta: normalizedMeta }, handler); } /** - * Register an app resource with the MCP server. + * Register an app resource with dual MCP/OpenAI support. * * This is a convenience wrapper around `server.registerResource` that: * - Defaults the MIME type to "text/html;profile=mcp-app" - * - Provides a cleaner API matching the SDK's callback signature + * - Registers both MCP and OpenAI variants for cross-platform compatibility + * + * Registers two resources: + * 1. MCP resource at the base URI with `text/html;profile=mcp-app` MIME type + * 2. 
OpenAI resource at URI+skybridge with `text/html+skybridge` MIME type * * @param server - The MCP server instance * @param name - Human-readable resource name @@ -157,6 +194,9 @@ export function registerAppResource( config: McpUiAppResourceConfig, readCallback: ReadResourceCallback, ): void { + const openaiUri = uri + OPENAI_RESOURCE_SUFFIX; + + // Register MCP resource (text/html;profile=mcp-app) server.registerResource( name, uri, @@ -167,4 +207,30 @@ export function registerAppResource( }, readCallback, ); + + // Register OpenAI resource (text/html+skybridge) + // Re-uses the same callback but returns with OpenAI MIME type + server.registerResource( + name + " (OpenAI)", + openaiUri, + { + ...config, + // Force OpenAI MIME type + mimeType: OPENAI_MIME_TYPE, + }, + async (resourceUri, extra) => { + const result = await readCallback(resourceUri, extra); + // Transform contents to use OpenAI MIME type + return { + contents: result.contents.map((content) => ({ + ...content, + uri: content.uri + OPENAI_RESOURCE_SUFFIX, + mimeType: + content.mimeType === RESOURCE_MIME_TYPE + ? OPENAI_MIME_TYPE + : content.mimeType, + })), + }; + }, + ); } diff --git a/tests/e2e/servers.spec.ts-snapshots/threejs.png b/tests/e2e/servers.spec.ts-snapshots/threejs.png index 683a77de..fbbb8e71 100644 Binary files a/tests/e2e/servers.spec.ts-snapshots/threejs.png and b/tests/e2e/servers.spec.ts-snapshots/threejs.png differ