Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions packages/types/src/provider-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ export const dynamicProviders = [
"unbound",
"roo",
"chutes",
"keywordsai",
] as const

export type DynamicProvider = (typeof dynamicProviders)[number]
Expand Down Expand Up @@ -140,6 +141,7 @@ export const providerNames = [
"vertex",
"xai",
"zai",
"keywordsai",
] as const

export const providerNamesSchema = z.enum(providerNames)
Expand Down Expand Up @@ -417,6 +419,12 @@ const basetenSchema = apiModelIdProviderModelSchema.extend({
basetenApiKey: z.string().optional(),
})

// Keywords AI gateway settings (OpenAI-compatible endpoint).
// `keywordsaiEnableLogging` opts in/out of request logging on the Keywords AI
// side; when explicitly false, the handler sends `disable_log` upstream.
const keywordsaiSchema = apiModelIdProviderModelSchema.extend({
	keywordsaiApiKey: z.string().optional(),
	keywordsaiBaseUrl: z.string().optional(),
	keywordsaiEnableLogging: z.boolean().optional(),
})

const defaultSchema = z.object({
apiProvider: z.undefined(),
})
Expand Down Expand Up @@ -458,6 +466,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
qwenCodeSchema.merge(z.object({ apiProvider: z.literal("qwen-code") })),
rooSchema.merge(z.object({ apiProvider: z.literal("roo") })),
vercelAiGatewaySchema.merge(z.object({ apiProvider: z.literal("vercel-ai-gateway") })),
keywordsaiSchema.merge(z.object({ apiProvider: z.literal("keywordsai") })),
defaultSchema,
])

Expand Down Expand Up @@ -499,6 +508,7 @@ export const providerSettingsSchema = z.object({
...qwenCodeSchema.shape,
...rooSchema.shape,
...vercelAiGatewaySchema.shape,
...keywordsaiSchema.shape,
...codebaseIndexProviderSchema.shape,
})

Expand Down Expand Up @@ -584,6 +594,7 @@ export const modelIdKeysByProvider: Record<TypicalProvider, ModelIdKey> = {
"io-intelligence": "ioIntelligenceModelId",
roo: "apiModelId",
"vercel-ai-gateway": "vercelAiGatewayModelId",
keywordsai: "apiModelId",
}

/**
Expand Down Expand Up @@ -720,6 +731,7 @@ export const MODELS_BY_PROVIDER: Record<
deepinfra: { id: "deepinfra", label: "DeepInfra", models: [] },
"vercel-ai-gateway": { id: "vercel-ai-gateway", label: "Vercel AI Gateway", models: [] },
chutes: { id: "chutes", label: "Chutes AI", models: [] },
keywordsai: { id: "keywordsai", label: "Keywords AI", models: [] },

// Local providers; models discovered from localhost endpoints.
lmstudio: { id: "lmstudio", label: "LM Studio", models: [] },
Expand Down
3 changes: 3 additions & 0 deletions src/api/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ import {
DeepInfraHandler,
MiniMaxHandler,
BasetenHandler,
KeywordsAiHandler,
} from "./providers"
import { NativeOllamaHandler } from "./providers/native-ollama"

Expand Down Expand Up @@ -197,6 +198,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new MiniMaxHandler(options)
case "baseten":
return new BasetenHandler(options)
case "keywordsai":
return new KeywordsAiHandler(options)
default:
return new AnthropicHandler(options)
}
Expand Down
55 changes: 55 additions & 0 deletions src/api/providers/fetchers/keywordsai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import axios from "axios"
import { z } from "zod"

import type { ModelInfo } from "@roo-code/types"

import { DEFAULT_HEADERS } from "../constants"

// Upstream provider metadata attached to a Keywords AI model entry.
// All fields are optional and currently only validated, not consumed.
const KeywordsAIProviderSchema = z.object({
	provider_name: z.string().optional(),
	provider_id: z.string().optional(),
	moderation: z.string().optional(),
	credential_fields: z.array(z.string()).optional(),
})

// One model record from the Keywords AI `/models/public` listing.
// `input_cost` / `output_cost` are token prices as reported by the API
// (units not shown here — assumed per-token pricing; confirm against the
// Keywords AI API reference).
const KeywordsAIModelSchema = z.object({
	model_name: z.string(),
	max_context_window: z.number(),
	input_cost: z.number(),
	output_cost: z.number(),
	rate_limit: z.number().optional(),
	provider: KeywordsAIProviderSchema.optional(),
})

// Top-level response envelope: `{ models: [...] }`.
const KeywordsAIModelsResponseSchema = z.object({
	models: z.array(KeywordsAIModelSchema),
})

/**
 * Fetches the public model listing from the Keywords AI gateway and maps it
 * to Roo's `ModelInfo` records, keyed by model name.
 *
 * The response is validated with zod; if validation fails (e.g. upstream
 * schema drift), we fall back to the raw payload so model discovery degrades
 * gracefully instead of breaking entirely. On that fallback path every field
 * is treated as untrusted and guarded per-item.
 *
 * @param baseUrl - Keywords AI API base URL; a trailing slash is tolerated.
 * @returns Map of model name to `ModelInfo`.
 * @throws Propagates axios errors (network failure, non-2xx responses).
 */
export async function getKeywordsAiModels(
	baseUrl: string = "https://api.keywordsai.co/api/",
): Promise<Record<string, ModelInfo>> {
	const url = `${baseUrl.replace(/\/$/, "")}/models/public`
	const models: Record<string, ModelInfo> = {}

	const response = await axios.get(url, { headers: DEFAULT_HEADERS })

	const parsed = KeywordsAIModelsResponseSchema.safeParse(response.data)
	// Fallback entries are NOT schema-validated, so type them as unknown
	// rather than asserting the validated shape onto them.
	const data: unknown[] = parsed.success ? parsed.data.models : (response.data?.models ?? [])

	for (const raw of data) {
		const m = raw as Partial<z.infer<typeof KeywordsAIModelSchema>>

		// Skip entries without a usable name (possible on the unvalidated
		// fallback path) instead of registering them under the key "undefined".
		if (typeof m?.model_name !== "string" || m.model_name.length === 0) {
			continue
		}

		const contextWindow = typeof m.max_context_window === "number" ? m.max_context_window : 8192
		// Keywords AI does not report a max output size; reserve 20% of the
		// context window as a conservative default.
		const maxTokens = Math.ceil(contextWindow * 0.2)

		models[m.model_name] = {
			maxTokens,
			contextWindow,
			supportsImages: false,
			supportsPromptCache: false,
			inputPrice: typeof m.input_cost === "number" ? m.input_cost : undefined,
			outputPrice: typeof m.output_cost === "number" ? m.output_cost : undefined,
		}
	}

	return models
}
8 changes: 8 additions & 0 deletions src/api/providers/fetchers/modelCache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ import { getDeepInfraModels } from "./deepinfra"
import { getHuggingFaceModels } from "./huggingface"
import { getRooModels } from "./roo"
import { getChutesModels } from "./chutes"
import { getKeywordsAiModels } from "./keywordsai"

const memoryCache = new NodeCache({ stdTTL: 5 * 60, checkperiod: 5 * 60 })

Expand Down Expand Up @@ -108,6 +109,9 @@ async function fetchModelsFromProvider(options: GetModelsOptions): Promise<Model
case "chutes":
models = await getChutesModels(options.apiKey)
break
case "keywordsai":
models = await getKeywordsAiModels(options.baseUrl)
break
default: {
// Ensures router is exhaustively checked if RouterName is a strict union.
const exhaustiveCheck: never = provider
Expand Down Expand Up @@ -250,6 +254,10 @@ export async function initializeModelCacheRefresh(): Promise<void> {
{ provider: "openrouter", options: { provider: "openrouter" } },
{ provider: "vercel-ai-gateway", options: { provider: "vercel-ai-gateway" } },
{ provider: "chutes", options: { provider: "chutes" } },
{
provider: "keywordsai",
options: { provider: "keywordsai", baseUrl: "https://api.keywordsai.co/api/" },
},
]

// Refresh each provider in background (fire and forget)
Expand Down
1 change: 1 addition & 0 deletions src/api/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,4 @@ export { VercelAiGatewayHandler } from "./vercel-ai-gateway"
export { DeepInfraHandler } from "./deepinfra"
export { MiniMaxHandler } from "./minimax"
export { BasetenHandler } from "./baseten"
export { KeywordsAiHandler } from "./keywordsai"
30 changes: 30 additions & 0 deletions src/api/providers/keywordsai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import type { ApiHandlerOptions } from "../../shared/api"
import { OpenAiHandler } from "./openai"

/**
 * API handler for the Keywords AI gateway.
 *
 * Keywords AI exposes an OpenAI-compatible endpoint, so this handler simply
 * configures the generic `OpenAiHandler` with Keywords-specific credentials,
 * base URL, and an identifying header. Its only provider-specific request
 * parameter is `disable_log`, sent when the user has explicitly disabled
 * logging via `keywordsaiEnableLogging: false`.
 */
export class KeywordsAiHandler extends OpenAiHandler {
	constructor(options: ApiHandlerOptions) {
		// Tag requests as originating from this extension; any caller-supplied
		// headers take precedence on key conflicts.
		const headers = {
			"X-KeywordsAI-Source": "RooCode-Extension",
			...(options.openAiHeaders || {}),
		}

		super({
			...options,
			openAiApiKey: options.keywordsaiApiKey ?? "not-provided",
			openAiBaseUrl: options.keywordsaiBaseUrl || "https://api.keywordsai.co/api/",
			openAiModelId: options.apiModelId,
			openAiStreamingEnabled: true,
			openAiHeaders: headers,
		})
	}

	/**
	 * Ask Keywords AI to skip logging only when logging was explicitly
	 * disabled; an unset option keeps the gateway's default behavior.
	 */
	protected override getExtraRequestParams(): Record<string, unknown> {
		return this.options.keywordsaiEnableLogging === false ? { disable_log: true } : {}
	}
}
12 changes: 12 additions & 0 deletions src/api/providers/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,8 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
// Add max_tokens if needed
this.addMaxTokensIfNeeded(requestOptions, modelInfo)

Object.assign(requestOptions, this.getExtraRequestParams())

let stream
try {
stream = await this.client.chat.completions.create(
Expand Down Expand Up @@ -235,6 +237,8 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
// Add max_tokens if needed
this.addMaxTokensIfNeeded(requestOptions, modelInfo)

Object.assign(requestOptions, this.getExtraRequestParams())

let response
try {
response = await this.client.chat.completions.create(
Expand Down Expand Up @@ -269,6 +273,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
}
}

/**
 * Optional extra body params merged into chat completions create().
 * Subclasses (e.g. Keywords AI gateway) use this for provider-specific params like disable_log.
 *
 * @returns Key/value pairs Object.assign-ed onto the request options just
 *   before the API call; the base implementation adds nothing.
 */
protected getExtraRequestParams(): Record<string, unknown> {
	return {}
}

protected processUsageMetrics(usage: any, _modelInfo?: ModelInfo): ApiStreamUsageChunk {
return {
type: "usage",
Expand Down
10 changes: 6 additions & 4 deletions src/core/task/Task.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2704,7 +2704,9 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
// Yields only if the first chunk is successful, otherwise will
// allow the user to retry the request (most likely due to rate
// limit error, which gets thrown on the first chunk).
const stream = this.attemptApiRequest(currentItem.retryAttempt ?? 0, { skipProviderRateLimit: true })
const stream = this.attemptApiRequest(currentItem.retryAttempt ?? 0, {
skipProviderRateLimit: true,
})
let assistantMessage = ""
let reasoningMessage = ""
let pendingGroundingSources: GroundingSource[] = []
Expand Down Expand Up @@ -4202,7 +4204,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
)
await this.handleContextWindowExceededError()
// Retry the request after handling the context window error
yield* this.attemptApiRequest(retryAttempt + 1)
yield* this.attemptApiRequest(retryAttempt + 1, options)
return
}

Expand All @@ -4222,7 +4224,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {

// Delegate generator output from the recursive call with
// incremented retry count.
yield* this.attemptApiRequest(retryAttempt + 1)
yield* this.attemptApiRequest(retryAttempt + 1, options)

return
} else {
Expand All @@ -4240,7 +4242,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
await this.say("api_req_retried")

// Delegate generator output from the recursive call.
yield* this.attemptApiRequest()
yield* this.attemptApiRequest(0, options)
return
}
}
Expand Down
8 changes: 8 additions & 0 deletions src/core/webview/webviewMessageHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -874,6 +874,7 @@ export const webviewMessageHandler = async (
lmstudio: {},
roo: {},
chutes: {},
keywordsai: {},
}

const safeGetModels = async (options: GetModelsOptions): Promise<ModelRecord> => {
Expand Down Expand Up @@ -924,6 +925,13 @@ export const webviewMessageHandler = async (
key: "chutes",
options: { provider: "chutes", apiKey: apiConfiguration.chutesApiKey },
},
{
key: "keywordsai",
options: {
provider: "keywordsai",
baseUrl: apiConfiguration.keywordsaiBaseUrl || "https://api.keywordsai.co/api/",
},
},
]

// IO Intelligence is conditional on api key
Expand Down
1 change: 1 addition & 0 deletions src/shared/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,7 @@ const dynamicProviderExtras = {
lmstudio: {} as {}, // eslint-disable-line @typescript-eslint/no-empty-object-type
roo: {} as { apiKey?: string; baseUrl?: string },
chutes: {} as { apiKey?: string },
keywordsai: {} as { apiKey?: string; baseUrl?: string },
} as const satisfies Record<RouterName, object>

// Build the dynamic options union from the map, intersected with CommonFetchParams
Expand Down
13 changes: 13 additions & 0 deletions webview-ui/src/components/settings/ApiOptions.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ import {
VercelAiGateway,
DeepInfra,
MiniMax,
KeywordsAI,
} from "./providers"

import { MODELS_BY_PROVIDER, PROVIDERS } from "./constants"
Expand Down Expand Up @@ -591,6 +592,18 @@ const ApiOptions = ({
/>
)}

{selectedProvider === "keywordsai" && (
<KeywordsAI
apiConfiguration={apiConfiguration}
setApiConfigurationField={setApiConfigurationField}
routerModels={routerModels}
refetchRouterModels={refetchRouterModels}
organizationAllowList={organizationAllowList}
modelValidationError={modelValidationError}
simplifySettings={fromWelcomeView}
/>
)}

{selectedProvider === "bedrock" && (
<Bedrock
apiConfiguration={apiConfiguration}
Expand Down
1 change: 1 addition & 0 deletions webview-ui/src/components/settings/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -81,4 +81,5 @@ export const PROVIDERS = [
{ value: "vercel-ai-gateway", label: "Vercel AI Gateway", proxy: false },
{ value: "minimax", label: "MiniMax", proxy: false },
{ value: "baseten", label: "Baseten", proxy: false },
{ value: "keywordsai", label: "Keywords AI", proxy: false },
].sort((a, b) => a.label.localeCompare(b.label))
Loading