121 changes: 121 additions & 0 deletions packages/opencode/src/provider/provider.ts
@@ -439,6 +439,92 @@ export namespace Provider {
},
}
},
unbound: async (input) => {
const config = await Config.get()
const providerConfig = config.provider?.["unbound"]

// Resolve the API key from env vars, auth storage, or config (same precedence as the cloudflare-ai-gateway loader)
const apiKey = await (async () => {
const envKey = input.env.map((key) => Env.get(key)).find(Boolean)
if (envKey) return envKey
const auth = await Auth.get(input.id)
if (auth?.type === "api") return auth.key
return providerConfig?.options?.apiKey
})()

if (!apiKey) return { autoload: false }

const baseURL = providerConfig?.options?.baseURL ?? "https://api.getunbound.ai/v1"

// Fetch available models from Unbound gateway
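// The parsing below accepts either snake_case or camelCase field names; the expected
// response shape (inferred from the fields read below, not from Unbound documentation) is:
//   { data: [{ id, name, parameters: { supports_images, supports_prompt_caching,
//              context_window, max_tokens }, pricing: { input_token_price,
//              output_token_price, cache_read_price, cache_write_price } }] }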
try {
const response = await fetch(`${baseURL}/models`, {
headers: { Authorization: `Bearer ${apiKey}` },
signal: AbortSignal.timeout(10000),
})

if (response.ok) {
const data = await response.json()
const models = data.data ?? data.models ?? []

if (models.length > 0) {
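// Replace the static "default" placeholder with the model list reported by the gateway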
delete input.models["default"]
for (const model of models) {
const modelId = model.id ?? model.name
const params = model.parameters ?? model
const pricing = model.pricing ?? {}
const supportsImages = params.supports_images ?? params.supportsImages ?? false

input.models[modelId] = {
id: modelId,
providerID: "unbound",
name: model.name ?? modelId,
api: { id: modelId, url: baseURL, npm: "@ai-sdk/openai-compatible" },
status: "active",
headers: {},
options: { supportsPromptCaching: params.supports_prompt_caching ?? params.supportsPromptCaching ?? false },
cost: {
input: parseFloat(pricing.input_token_price ?? pricing.inputTokenPrice) || 0,
output: parseFloat(pricing.output_token_price ?? pricing.outputTokenPrice) || 0,
cache: {
read: parseFloat(pricing.cache_read_price ?? pricing.cacheReadPrice) || 0,
write: parseFloat(pricing.cache_write_price ?? pricing.cacheWritePrice) || 0,
},
},
limit: {
context: params.context_window ?? params.contextWindow ?? 128000,
output: params.max_tokens ?? params.maxTokens ?? 4096,
},
capabilities: {
temperature: true,
reasoning: false,
attachment: supportsImages,
toolcall: true,
input: { text: true, audio: false, image: supportsImages, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
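// No release date is parsed from the gateway response, so default to today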
release_date: new Date().toISOString().split("T")[0],
variants: {},
}
}
}
} else {
log.warn("Failed to fetch Unbound models", { status: response.status, statusText: response.statusText })
}
} catch (e) {
log.warn("Failed to fetch Unbound models, using default", { error: e })
}

return {
autoload: true,
options: {
headers: {
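// Label every request with app=opencode metadata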
"X-Unbound-Metadata": JSON.stringify({ labels: [{ key: "app", value: "opencode" }] }),
},
},
}
},
}

export const Model = z
@@ -643,6 +729,41 @@ export namespace Provider {
}
}

// Register the Unbound AI gateway provider; its model list is populated dynamically by the custom loader above
if (!database["unbound"]) {
database["unbound"] = {
id: "unbound",
name: "Unbound",
source: "custom",
env: ["UNBOUND_API_KEY"],
options: {},
models: {
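// Placeholder entry; the custom loader above replaces it with the fetched model list once the API key resolves and the /models request succeeds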
default: {
id: "default",
providerID: "unbound",
name: "Default Model",
api: { id: "default", url: "https://api.getunbound.ai/v1", npm: "@ai-sdk/openai-compatible" },
status: "active",
headers: {},
options: {},
cost: { input: 0, output: 0, cache: { read: 0, write: 0 } },
limit: { context: 128000, output: 4096 },
capabilities: {
temperature: true,
reasoning: false,
attachment: true,
toolcall: true,
input: { text: true, audio: false, image: true, video: false, pdf: false },
output: { text: true, audio: false, image: false, video: false, pdf: false },
interleaved: false,
},
release_date: "2024-01-01",
variants: {},
},
},
}
}

function mergeProvider(providerID: string, provider: Partial<Info>) {
const existing = providers[providerID]
if (existing) {
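
Usage note: the loader reads provider.unbound.options.apiKey and options.baseURL from the opencode config, falling back to the UNBOUND_API_KEY environment variable or stored auth. A minimal sketch of the config object the loader expects Config.get() to return; only the keys shown are read by this diff, the rest of the schema is assumed:

// Hypothetical resolved config fragment; the apiKey may instead come from auth
// storage or the UNBOUND_API_KEY environment variable, per the loader above.
const exampleUnboundConfig = {
  provider: {
    unbound: {
      options: {
        apiKey: "sk-unbound-...",                // illustrative value only
        baseURL: "https://api.getunbound.ai/v1", // default used when omitted
      },
    },
  },
}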