Merged
22 changes: 22 additions & 0 deletions packages/types/src/providers/groq.ts
@@ -11,6 +11,8 @@ export type GroqModelId =
| "qwen/qwen3-32b"
| "deepseek-r1-distill-llama-70b"
| "moonshotai/kimi-k2-instruct"
| "openai/gpt-oss-120b"
| "openai/gpt-oss-20b"

export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile" // Defaulting to Llama3 70B Versatile

@@ -97,4 +99,24 @@ export const groqModels = {
outputPrice: 3.0,
description: "Moonshot AI Kimi K2 Instruct 1T model, 128K context.",
},
"openai/gpt-oss-120b": {
Contributor
The PR description mentions that these models support tool use, browser search, code execution, and JSON object mode. Should we add these as boolean flags in the model info? For example:
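A minimal sketch of what such flags might look like, assuming hypothetical field names (`supportsToolUse`, `supportsBrowserSearch`, `supportsCodeExecution`, `supportsJsonObjectMode`) that are not confirmed to exist on the current `ModelInfo` type; the capability values themselves come from the PR description:

```typescript
// Hypothetical sketch only: these flag names are assumptions, not confirmed ModelInfo fields.
interface ModelCapabilityFlags {
	supportsToolUse?: boolean
	supportsBrowserSearch?: boolean
	supportsCodeExecution?: boolean
	supportsJsonObjectMode?: boolean
}

// Capabilities the PR description attributes to both new models.
const gptOssCapabilities: Record<string, ModelCapabilityFlags> = {
	"openai/gpt-oss-120b": {
		supportsToolUse: true,
		supportsBrowserSearch: true,
		supportsCodeExecution: true,
		supportsJsonObjectMode: true,
	},
	"openai/gpt-oss-20b": {
		supportsToolUse: true,
		supportsBrowserSearch: true,
		supportsCodeExecution: true,
		supportsJsonObjectMode: true,
	},
}
```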

maxTokens: 32766,
contextWindow: 131072,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.15,
outputPrice: 0.75,
description:
"GPT-OSS 120B is OpenAI's flagship open source model, built on a Mixture-of-Experts (MoE) architecture with 20 billion parameters and 128 experts.",
},
"openai/gpt-oss-20b": {
maxTokens: 32768,
contextWindow: 131072,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.1,
outputPrice: 0.5,
description:
"GPT-OSS 20B is OpenAI's flagship open source model, built on a Mixture-of-Experts (MoE) architecture with 20 billion parameters and 32 experts.",
Contributor

Is this description correct? The model name suggests 20B parameters total, but the description says "20 billion parameters and 32 experts". Should this be "GPT-OSS 20B is OpenAI's flagship open source model, built on a Mixture-of-Experts (MoE) architecture with 32 experts"?

Member Author
yes, shut up

},
} as const satisfies Record<string, ModelInfo>
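For reference, a minimal sketch of how the pricing fields in these entries could be consumed, assuming (an assumption, not stated in this diff) that `inputPrice` and `outputPrice` are USD per million tokens:

```typescript
// Sketch only: assumes inputPrice/outputPrice are USD per 1M tokens (not confirmed by this diff).
type ModelPricing = { inputPrice: number; outputPrice: number }

function estimateCostUsd(model: ModelPricing, inputTokens: number, outputTokens: number): number {
	return (inputTokens / 1_000_000) * model.inputPrice + (outputTokens / 1_000_000) * model.outputPrice
}

// e.g. 10k prompt tokens + 2k completion tokens on openai/gpt-oss-120b:
// estimateCostUsd({ inputPrice: 0.15, outputPrice: 0.75 }, 10_000, 2_000) ≈ 0.003
```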