Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions packages/types/src/provider-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import {
mistralModels,
moonshotModels,
openAiNativeModels,
poeModels,
qwenCodeModels,
sambaNovaModels,
vertexModels,
Expand Down Expand Up @@ -135,6 +136,7 @@ export const providerNames = [
"moonshot",
"minimax",
"openai-native",
"poe",
"qwen-code",
"roo",
"sambanova",
Expand Down Expand Up @@ -366,6 +368,10 @@ const groqSchema = apiModelIdProviderModelSchema.extend({
groqApiKey: z.string().optional(),
})

const poeSchema = apiModelIdProviderModelSchema.extend({
poeApiKey: z.string().optional(),
})

const huggingFaceSchema = baseProviderSettingsSchema.extend({
huggingFaceApiKey: z.string().optional(),
huggingFaceModelId: z.string().optional(),
Expand Down Expand Up @@ -460,6 +466,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
fakeAiSchema.merge(z.object({ apiProvider: z.literal("fake-ai") })),
xaiSchema.merge(z.object({ apiProvider: z.literal("xai") })),
groqSchema.merge(z.object({ apiProvider: z.literal("groq") })),
poeSchema.merge(z.object({ apiProvider: z.literal("poe") })),
basetenSchema.merge(z.object({ apiProvider: z.literal("baseten") })),
huggingFaceSchema.merge(z.object({ apiProvider: z.literal("huggingface") })),
chutesSchema.merge(z.object({ apiProvider: z.literal("chutes") })),
Expand Down Expand Up @@ -503,6 +510,7 @@ export const providerSettingsSchema = z.object({
...fakeAiSchema.shape,
...xaiSchema.shape,
...groqSchema.shape,
...poeSchema.shape,
...basetenSchema.shape,
...huggingFaceSchema.shape,
...chutesSchema.shape,
Expand Down Expand Up @@ -591,6 +599,7 @@ export const modelIdKeysByProvider: Record<TypicalProvider, ModelIdKey> = {
requesty: "requestyModelId",
xai: "apiModelId",
groq: "apiModelId",
poe: "apiModelId",
baseten: "apiModelId",
chutes: "apiModelId",
litellm: "litellmModelId",
Expand Down Expand Up @@ -680,6 +689,7 @@ export const MODELS_BY_PROVIDER: Record<
models: Object.keys(geminiModels),
},
groq: { id: "groq", label: "Groq", models: Object.keys(groqModels) },
poe: { id: "poe", label: "Poe", models: Object.keys(poeModels) },
"io-intelligence": {
id: "io-intelligence",
label: "IO Intelligence",
Expand Down
4 changes: 4 additions & 0 deletions packages/types/src/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ export * from "./moonshot.js"
export * from "./ollama.js"
export * from "./openai.js"
export * from "./openrouter.js"
export * from "./poe.js"
export * from "./qwen-code.js"
export * from "./requesty.js"
export * from "./roo.js"
Expand Down Expand Up @@ -51,6 +52,7 @@ import { litellmDefaultModelId } from "./lite-llm.js"
import { mistralDefaultModelId } from "./mistral.js"
import { moonshotDefaultModelId } from "./moonshot.js"
import { openRouterDefaultModelId } from "./openrouter.js"
import { poeDefaultModelId } from "./poe.js"
import { qwenCodeDefaultModelId } from "./qwen-code.js"
import { requestyDefaultModelId } from "./requesty.js"
import { rooDefaultModelId } from "./roo.js"
Expand Down Expand Up @@ -145,6 +147,8 @@ export function getProviderDefaultModelId(
return qwenCodeDefaultModelId
case "vercel-ai-gateway":
return vercelAiGatewayDefaultModelId
case "poe":
return poeDefaultModelId
case "anthropic":
case "gemini-cli":
case "human-relay":
Expand Down
205 changes: 205 additions & 0 deletions packages/types/src/providers/poe.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,205 @@
import type { ModelInfo } from "../model.js"

// https://creator.poe.com/docs/external-applications/openai-compatible-api
// Model identifiers accepted by Poe's OpenAI-compatible endpoint.
// https://creator.poe.com/docs/external-applications/openai-compatible-api
export type PoeModelId =
	| "gpt-4o"
	| "gpt-4o-mini"
	| "gpt-4-turbo"
	| "gpt-3.5-turbo"
	| "claude-3-5-sonnet"
	| "claude-3-5-haiku"
	| "claude-3-opus"
	| "claude-3-sonnet"
	| "claude-3-haiku"
	| "claude-instant"
	| "gemini-1.5-pro"
	| "gemini-1.5-flash"
	| "llama-3.1-405b"
	| "llama-3.1-70b"
	| "llama-3.1-8b"
	| "mistral-large"
	| "mixtral-8x7b"
	| "qwen-2.5-72b"
	| "solar-mini"

// Model used when the user has not picked one explicitly (see getProviderDefaultModelId).
export const poeDefaultModelId: PoeModelId = "claude-3-5-sonnet"

export const poeModels = {
// GPT Models
"gpt-4o": {
maxTokens: 16384,
contextWindow: 128000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 2.5,
outputPrice: 10,
description: "OpenAI's most advanced model with vision capabilities",
},
"gpt-4o-mini": {
maxTokens: 16384,
contextWindow: 128000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 0.15,
outputPrice: 0.6,
description: "Affordable and intelligent small model for fast, lightweight tasks",
},
"gpt-4-turbo": {
maxTokens: 4096,
contextWindow: 128000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 10,
outputPrice: 30,
description: "GPT-4 Turbo with vision capabilities",
},
"gpt-3.5-turbo": {
maxTokens: 4096,
contextWindow: 16385,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.5,
outputPrice: 1.5,
description: "Fast and efficient model for most tasks",
},
// Claude Models
"claude-3-5-sonnet": {
maxTokens: 8192,
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 3,
outputPrice: 15,
description: "Most intelligent Claude model with vision capabilities",
},
"claude-3-5-haiku": {
maxTokens: 4096,
contextWindow: 200000,
supportsImages: false,
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Claude 3.5 Haiku supports vision capabilities according to Anthropic's documentation. This should be supportsImages: true to match the capability and be consistent with claude-3-haiku which correctly has vision support enabled.

Suggested change
supportsImages: false,
supportsImages: true,

Fix it with Roo Code or mention @roomote and request a fix.

supportsPromptCache: false,
inputPrice: 0.25,
outputPrice: 1.25,
description: "Fast and efficient Claude model",
},
"claude-3-opus": {
maxTokens: 4096,
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 15,
outputPrice: 75,
description: "Most powerful Claude 3 model with vision capabilities",
},
"claude-3-sonnet": {
maxTokens: 4096,
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 3,
outputPrice: 15,
description: "Balanced Claude 3 model with vision capabilities",
},
"claude-3-haiku": {
maxTokens: 4096,
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 0.25,
outputPrice: 1.25,
description: "Fast and lightweight Claude 3 model with vision capabilities",
},
"claude-instant": {
maxTokens: 4096,
contextWindow: 100000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.8,
outputPrice: 2.4,
description: "Fast Claude model for simple tasks",
},
// Gemini Models
"gemini-1.5-pro": {
maxTokens: 8192,
contextWindow: 2000000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 3.5,
outputPrice: 10.5,
description: "Google's most capable model with 2M context window",
},
"gemini-1.5-flash": {
maxTokens: 8192,
contextWindow: 1000000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 0.075,
outputPrice: 0.3,
description: "Fast and efficient Gemini model with 1M context window",
},
// Llama Models
"llama-3.1-405b": {
maxTokens: 4096,
contextWindow: 128000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 2.7,
outputPrice: 2.7,
description: "Meta's largest open model with 405B parameters",
},
"llama-3.1-70b": {
maxTokens: 4096,
contextWindow: 128000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.59,
outputPrice: 0.79,
description: "Powerful open model with 70B parameters",
},
"llama-3.1-8b": {
maxTokens: 4096,
contextWindow: 128000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.05,
outputPrice: 0.08,
description: "Efficient open model with 8B parameters",
},
// Mistral Models
"mistral-large": {
maxTokens: 4096,
contextWindow: 32000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 2,
outputPrice: 6,
description: "Mistral's flagship model",
},
"mixtral-8x7b": {
maxTokens: 4096,
contextWindow: 32000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.24,
outputPrice: 0.24,
description: "Mixture of experts model with 8x7B parameters",
},
// Other Models
"qwen-2.5-72b": {
maxTokens: 4096,
contextWindow: 32000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.35,
outputPrice: 0.4,
description: "Alibaba's Qwen 2.5 model with 72B parameters",
},
"solar-mini": {
maxTokens: 4096,
contextWindow: 32000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.06,
outputPrice: 0.06,
description: "Small and efficient model",
},
} as const satisfies Record<string, ModelInfo>
3 changes: 3 additions & 0 deletions src/api/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import {
FakeAIHandler,
XAIHandler,
GroqHandler,
PoeHandler,
HuggingFaceHandler,
ChutesHandler,
LiteLLMHandler,
Expand Down Expand Up @@ -163,6 +164,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new XAIHandler(options)
case "groq":
return new GroqHandler(options)
case "poe":
return new PoeHandler(options)
case "deepinfra":
return new DeepInfraHandler(options)
case "huggingface":
Expand Down
1 change: 1 addition & 0 deletions src/api/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ export { OllamaHandler } from "./ollama"
export { OpenAiNativeHandler } from "./openai-native"
export { OpenAiHandler } from "./openai"
export { OpenRouterHandler } from "./openrouter"
export { PoeHandler } from "./poe"
export { QwenCodeHandler } from "./qwen-code"
export { RequestyHandler } from "./requesty"
export { SambaNovaHandler } from "./sambanova"
Expand Down
19 changes: 19 additions & 0 deletions src/api/providers/poe.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import { type PoeModelId, poeDefaultModelId, poeModels } from "@roo-code/types"

import type { ApiHandlerOptions } from "../../shared/api"

import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"

/**
 * API handler for Poe's OpenAI-compatible endpoint.
 *
 * Pure configuration wrapper: all request/streaming logic lives in
 * {@link BaseOpenAiCompatibleProvider}; this class only wires in the Poe
 * base URL, API key, and static model table.
 */
export class PoeHandler extends BaseOpenAiCompatibleProvider<PoeModelId> {
	constructor(options: ApiHandlerOptions) {
		super({
			...options,
			providerName: "Poe",
			// OpenAI-compatible endpoint documented at
			// https://creator.poe.com/docs/external-applications/openai-compatible-api
			baseURL: "https://api.poe.com/v1",
			// poeApiKey may be undefined here; the base provider is responsible
			// for surfacing a missing-key error at request time.
			apiKey: options.poeApiKey,
			defaultProviderModelId: poeDefaultModelId,
			providerModels: poeModels,
			defaultTemperature: 0.7,
		})
	}
}
1 change: 1 addition & 0 deletions src/shared/ProfileValidator.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ export class ProfileValidator {
case "deepseek":
case "xai":
case "groq":
case "poe":
case "sambanova":
case "chutes":
case "fireworks":
Expand Down
Loading