2 changes: 2 additions & 0 deletions apps/cli/src/lib/utils/context-window.ts
@@ -46,6 +46,8 @@ function getModelIdForProvider(config: ProviderSettings): string | undefined {
			return config.openAiModelId
		case "requesty":
			return config.requestyModelId
+		case "unbound":
+			return config.unboundModelId
		case "litellm":
			return config.litellmModelId
		case "vercel-ai-gateway":
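A standalone sketch of the lookup this hunk extends, for readers skimming the diff; `resolveModelId` and the trimmed-down `Profile` type are illustrative stand-ins for the real `getModelIdForProvider`/`ProviderSettings`, which are not necessarily exported from this file.

```ts
// Illustrative only: the same switch-on-provider pattern as getModelIdForProvider above,
// reduced to the fields that matter for this hunk.
type Profile = { apiProvider: string; requestyModelId?: string; unboundModelId?: string; litellmModelId?: string }

function resolveModelId(config: Profile): string | undefined {
	switch (config.apiProvider) {
		case "requesty":
			return config.requestyModelId
		case "unbound":
			return config.unboundModelId
		case "litellm":
			return config.litellmModelId
		default:
			return undefined
	}
}

// With the new case, an "unbound" profile resolves its own model id field
// instead of falling through to the default branch.
resolveModelId({ apiProvider: "unbound", unboundModelId: "anthropic/claude-sonnet-4-5" }) // "anthropic/claude-sonnet-4-5"
```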
1 change: 1 addition & 0 deletions packages/types/src/global-settings.ts
@@ -264,6 +264,7 @@ export const SECRET_STATE_KEYS = [
	"mistralApiKey",
	"minimaxApiKey",
	"requestyApiKey",
+	"unboundApiKey",
	"xaiApiKey",
	"litellmApiKey",
	"codeIndexOpenAiKey",
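Adding `"unboundApiKey"` here is what keeps the key out of plain settings state. A hedged sketch of the kind of split this list drives; `splitSecrets` is an illustrative helper, not code from this repo.

```ts
// Illustrative only: route secret fields (e.g. into VS Code SecretStorage) separately
// from ordinary settings, using SECRET_STATE_KEYS as the filter.
function splitSecrets(settings: Record<string, unknown>, secretKeys: readonly string[]) {
	const secrets: Record<string, unknown> = {}
	const rest: Record<string, unknown> = {}
	for (const [key, value] of Object.entries(settings)) {
		;(secretKeys.includes(key) ? secrets : rest)[key] = value
	}
	return { secrets, rest }
}

// With "unboundApiKey" listed, the key lands in the secrets bucket; the model id does not.
splitSecrets({ unboundApiKey: "sk-example", unboundModelId: "anthropic/claude-sonnet-4-5" }, ["unboundApiKey"])
```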
13 changes: 11 additions & 2 deletions packages/types/src/provider-settings.ts
@@ -34,7 +34,7 @@ export const DEFAULT_CONSECUTIVE_MISTAKE_LIMIT = 3
 * Dynamic provider requires external API calls in order to get the model list.
 */

-export const dynamicProviders = ["openrouter", "vercel-ai-gateway", "litellm", "requesty", "roo"] as const
+export const dynamicProviders = ["openrouter", "vercel-ai-gateway", "litellm", "requesty", "roo", "unbound"] as const

export type DynamicProvider = (typeof dynamicProviders)[number]

@@ -142,7 +142,6 @@ export const retiredProviderNames = [
	"groq",
	"huggingface",
	"io-intelligence",
-	"unbound",
] as const

export const retiredProviderNamesSchema = z.enum(retiredProviderNames)
@@ -327,6 +326,11 @@ const requestySchema = baseProviderSettingsSchema.extend({
	requestyModelId: z.string().optional(),
})

+const unboundSchema = baseProviderSettingsSchema.extend({
+	unboundApiKey: z.string().optional(),
+	unboundModelId: z.string().optional(),
+})
+
const fakeAiSchema = baseProviderSettingsSchema.extend({
	fakeAi: z.unknown().optional(),
})
@@ -399,6 +403,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
	moonshotSchema.merge(z.object({ apiProvider: z.literal("moonshot") })),
	minimaxSchema.merge(z.object({ apiProvider: z.literal("minimax") })),
	requestySchema.merge(z.object({ apiProvider: z.literal("requesty") })),
+	unboundSchema.merge(z.object({ apiProvider: z.literal("unbound") })),
	fakeAiSchema.merge(z.object({ apiProvider: z.literal("fake-ai") })),
	xaiSchema.merge(z.object({ apiProvider: z.literal("xai") })),
	basetenSchema.merge(z.object({ apiProvider: z.literal("baseten") })),
@@ -431,6 +436,7 @@ export const providerSettingsSchema = z.object({
	...moonshotSchema.shape,
	...minimaxSchema.shape,
	...requestySchema.shape,
+	...unboundSchema.shape,
	...fakeAiSchema.shape,
	...xaiSchema.shape,
	...basetenSchema.shape,
@@ -468,6 +474,7 @@ export const modelIdKeys = [
	"lmStudioModelId",
	"lmStudioDraftModelId",
	"requestyModelId",
+	"unboundModelId",
	"litellmModelId",
	"vercelAiGatewayModelId",
] as const satisfies readonly (keyof ProviderSettings)[]
@@ -505,6 +512,7 @@ export const modelIdKeysByProvider: Record<TypicalProvider, ModelIdKey> = {
	deepseek: "apiModelId",
	"qwen-code": "apiModelId",
	requesty: "requestyModelId",
+	unbound: "unboundModelId",
	xai: "apiModelId",
	baseten: "apiModelId",
	litellm: "litellmModelId",
@@ -627,6 +635,7 @@ export const MODELS_BY_PROVIDER: Record<
	litellm: { id: "litellm", label: "LiteLLM", models: [] },
	openrouter: { id: "openrouter", label: "OpenRouter", models: [] },
	requesty: { id: "requesty", label: "Requesty", models: [] },
+	unbound: { id: "unbound", label: "Unbound", models: [] },
	"vercel-ai-gateway": { id: "vercel-ai-gateway", label: "Vercel AI Gateway", models: [] },

	// Local providers; models discovered from localhost endpoints.
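Taken together, these hunks make an Unbound profile parse through the discriminated union and mark the provider as dynamic (its model list is fetched at runtime). A sketch of what now validates, assuming these symbols are re-exported from `@roo-code/types` like the others in this PR and that the base settings fields are optional:

```ts
import { providerSettingsSchemaDiscriminated, dynamicProviders } from "@roo-code/types"

// Parses because the union now contains unboundSchema merged with apiProvider: "unbound".
// Both unbound fields are optional strings, so a partial profile validates too.
const profile = providerSettingsSchemaDiscriminated.parse({
	apiProvider: "unbound",
	unboundApiKey: "sk-example",
	unboundModelId: "anthropic/claude-sonnet-4-5",
})

// "unbound" is also back in dynamicProviders, i.e. its model list comes from an API call.
const isDynamic = (dynamicProviders as readonly string[]).includes("unbound") // true
```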
4 changes: 4 additions & 0 deletions packages/types/src/providers/index.ts
@@ -17,6 +17,7 @@ export * from "./qwen-code.js"
export * from "./requesty.js"
export * from "./roo.js"
export * from "./sambanova.js"
+export * from "./unbound.js"
export * from "./vertex.js"
export * from "./vscode-llm.js"
export * from "./xai.js"
@@ -39,6 +40,7 @@ import { qwenCodeDefaultModelId } from "./qwen-code.js"
import { requestyDefaultModelId } from "./requesty.js"
import { rooDefaultModelId } from "./roo.js"
import { sambaNovaDefaultModelId } from "./sambanova.js"
+import { unboundDefaultModelId } from "./unbound.js"
import { vertexDefaultModelId } from "./vertex.js"
import { vscodeLlmDefaultModelId } from "./vscode-llm.js"
import { xaiDefaultModelId } from "./xai.js"
@@ -105,6 +107,8 @@ export function getProviderDefaultModelId(
			return rooDefaultModelId
		case "qwen-code":
			return qwenCodeDefaultModelId
+		case "unbound":
+			return unboundDefaultModelId
		case "vercel-ai-gateway":
			return vercelAiGatewayDefaultModelId
		case "anthropic":
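With this case in place, a profile without an explicit `unboundModelId` falls back to the id defined in `unbound.ts`. A minimal sketch, assuming `getProviderDefaultModelId` is re-exported from `@roo-code/types` and takes just the provider name, as the switch here suggests:

```ts
import { getProviderDefaultModelId, unboundDefaultModelId } from "@roo-code/types"

// Signature assumed: provider name in, default model id out.
const fallback = getProviderDefaultModelId("unbound")
console.log(fallback === unboundDefaultModelId) // true — "anthropic/claude-sonnet-4-5"
```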
16 changes: 16 additions & 0 deletions packages/types/src/providers/unbound.ts
@@ -0,0 +1,16 @@
import type { ModelInfo } from "../model.js"

// Unbound
// https://gateway.getunbound.ai
export const unboundDefaultModelId = "anthropic/claude-sonnet-4-5"

export const unboundDefaultModelInfo: ModelInfo = {
	maxTokens: 8192,
	contextWindow: 200_000,
	supportsImages: true,
	supportsPromptCache: true,
	inputPrice: 3.0,
	outputPrice: 15.0,
	cacheWritesPrice: 3.75,
	cacheReadsPrice: 0.3,
}
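Assuming the price fields follow the convention used elsewhere in this package (USD per million tokens), a quick worked example of what these defaults imply for a single request; the token counts are made up:

```ts
import { unboundDefaultModelInfo } from "@roo-code/types"

const { inputPrice = 0, outputPrice = 0, cacheReadsPrice = 0 } = unboundDefaultModelInfo

// Hypothetical request: 2k fresh input tokens, 8k cache-read tokens, 1k output tokens.
const costUsd = (2_000 * inputPrice + 8_000 * cacheReadsPrice + 1_000 * outputPrice) / 1_000_000
// (2000 * 3.0 + 8000 * 0.3 + 1000 * 15.0) / 1e6 = 0.0234 USD
```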
3 changes: 3 additions & 0 deletions src/api/index.ts
@@ -21,6 +21,7 @@ import {
	MistralHandler,
	VsCodeLmHandler,
	RequestyHandler,
+	UnboundHandler,
	FakeAIHandler,
	XAIHandler,
	LiteLLMHandler,
@@ -151,6 +152,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
			return new MistralHandler(options)
		case "requesty":
			return new RequestyHandler(options)
+		case "unbound":
+			return new UnboundHandler(options)
		case "fake-ai":
			return new FakeAIHandler(options)
		case "xai":
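End to end, a profile with `apiProvider: "unbound"` now routes to the new handler. A hedged usage sketch; the import path and the settings values are illustrative, while `buildApiHandler`, `ProviderSettings`, and `UnboundHandler` are the identifiers visible in this diff:

```ts
import type { ProviderSettings } from "@roo-code/types"
import { buildApiHandler } from "../api" // path illustrative; the function lives in src/api/index.ts

const settings: ProviderSettings = {
	apiProvider: "unbound",
	unboundApiKey: process.env.UNBOUND_API_KEY,
	unboundModelId: "anthropic/claude-sonnet-4-5",
}

// Dispatches to `new UnboundHandler(options)` via the new case above.
const handler = buildApiHandler(settings)
```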
4 changes: 4 additions & 0 deletions src/api/providers/fetchers/modelCache.ts
@@ -19,6 +19,7 @@ import { fileExistsAtPath } from "../../../utils/fs"
import { getOpenRouterModels } from "./openrouter"
import { getVercelAiGatewayModels } from "./vercel-ai-gateway"
import { getRequestyModels } from "./requesty"
+import { getUnboundModels } from "./unbound"
import { getLiteLLMModels } from "./litellm"
import { GetModelsOptions } from "../../../shared/api"
import { getOllamaModels } from "./ollama"
@@ -68,6 +69,9 @@ async function fetchModelsFromProvider(options: GetModelsOptions): Promise<Model
			// Requesty models endpoint requires an API key for per-user custom policies.
			models = await getRequestyModels(options.baseUrl, options.apiKey)
			break
+		case "unbound":
+			models = await getUnboundModels(options.apiKey)
+			break
		case "litellm":
			// Type safety ensures apiKey and baseUrl are always provided for LiteLLM.
			models = await getLiteLLMModels(options.apiKey, options.baseUrl)
40 changes: 40 additions & 0 deletions src/api/providers/fetchers/unbound.ts
@@ -0,0 +1,40 @@
import axios from "axios"

import type { ModelInfo } from "@roo-code/types"

import { parseApiPrice } from "../../../shared/cost"

export async function getUnboundModels(apiKey?: string | null): Promise<Record<string, ModelInfo>> {
	const models: Record<string, ModelInfo> = {}

	try {
		const headers: Record<string, string> = {}

		if (apiKey) {
			headers["Authorization"] = `Bearer ${apiKey}`
		}

		const response = await axios.get("https://api.getunbound.ai/models", { headers })
		const rawModels = response.data?.data ?? response.data

		for (const rawModel of rawModels) {
			const modelInfo: ModelInfo = {
				maxTokens: rawModel.max_output_tokens ?? 8192,
				contextWindow: rawModel.context_window ?? 200_000,
				supportsPromptCache: rawModel.supports_caching ?? false,
				supportsImages: rawModel.supports_vision ?? false,
				inputPrice: parseApiPrice(rawModel.input_price),
				outputPrice: parseApiPrice(rawModel.output_price),
				description: rawModel.description,
				cacheWritesPrice: parseApiPrice(rawModel.caching_price),
				cacheReadsPrice: parseApiPrice(rawModel.cached_price),
			}

			models[rawModel.id] = modelInfo
		}
	} catch (error) {
		console.error(`Error fetching Unbound models: ${JSON.stringify(error, Object.getOwnPropertyNames(error), 2)}`)
	}

	return models
}
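The fetcher only reads a handful of fields per model, so a response entry from `https://api.getunbound.ai/models` is expected to look roughly like the object below; the field names come from the code, the values are invented for illustration. A minimal call follows — without an API key the request goes out unauthenticated, and any failure is logged and swallowed so the result is just an empty map.

```ts
import { getUnboundModels } from "./unbound"

// Shape the parser consumes (values invented). Missing numeric/boolean fields
// fall back to the ?? defaults in the fetcher above.
const exampleEntry = {
	id: "anthropic/claude-sonnet-4-5",
	max_output_tokens: 8192,
	context_window: 200_000,
	supports_caching: true,
	supports_vision: true,
	input_price: 3.0,
	output_price: 15.0,
	caching_price: 3.75,
	cached_price: 0.3,
	description: "Example entry, not real API output",
}
void exampleEntry

// Returns Record<string, ModelInfo> keyed by model id; {} on any error.
const models = await getUnboundModels(process.env.UNBOUND_API_KEY)
```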
1 change: 1 addition & 0 deletions src/api/providers/index.ts
@@ -17,6 +17,7 @@ export { OpenRouterHandler } from "./openrouter"
export { QwenCodeHandler } from "./qwen-code"
export { RequestyHandler } from "./requesty"
export { SambaNovaHandler } from "./sambanova"
+export { UnboundHandler } from "./unbound"
export { VertexHandler } from "./vertex"
export { VsCodeLmHandler } from "./vscode-lm"
export { XAIHandler } from "./xai"